@@ -1,427 +1,429 @@
"""Base Tornado handlers for the notebook server."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

import functools
import json
import logging
import os
import re
import sys
import traceback
try:
    # py3
    from http.client import responses
except ImportError:
    from httplib import responses

from jinja2 import TemplateNotFound
from tornado import web

try:
    from tornado.log import app_log
except ImportError:
    app_log = logging.getLogger()

from IPython.config import Application
from IPython.utils.path import filefind
from IPython.utils.py3compat import string_types
from IPython.html.utils import is_hidden

#-----------------------------------------------------------------------------
# Top-level handlers
#-----------------------------------------------------------------------------
non_alphanum = re.compile(r'[^A-Za-z0-9]')

class AuthenticatedHandler(web.RequestHandler):
    """A RequestHandler with an authenticated user."""

    def set_default_headers(self):
        headers = self.settings.get('headers', {})

        if "X-Frame-Options" not in headers:
            headers["X-Frame-Options"] = "SAMEORIGIN"

        for header_name,value in headers.items() :
            try:
                self.set_header(header_name, value)
            except Exception:
                # tornado raise Exception (not a subclass)
                # if method is unsupported (websocket and Access-Control-Allow-Origin
                # for example, so just ignore)
                pass

    def clear_login_cookie(self):
        self.clear_cookie(self.cookie_name)

    def get_current_user(self):
        user_id = self.get_secure_cookie(self.cookie_name)
        # For now the user_id should not return empty, but it could eventually
        if user_id == '':
            user_id = 'anonymous'
        if user_id is None:
            # prevent extra Invalid cookie sig warnings:
            self.clear_login_cookie()
            if not self.login_available:
                user_id = 'anonymous'
        return user_id

    @property
    def cookie_name(self):
        default_cookie_name = non_alphanum.sub('-', 'username-{}'.format(
            self.request.host
        ))
        return self.settings.get('cookie_name', default_cookie_name)

    @property
    def password(self):
        """our password"""
        return self.settings.get('password', '')

    @property
    def logged_in(self):
        """Is a user currently logged in?

        """
        user = self.get_current_user()
        return (user and not user == 'anonymous')

    @property
    def login_available(self):
        """May a user proceed to log in?

        This returns True if login capability is available, irrespective of
        whether the user is already logged in or not.

        """
        return bool(self.settings.get('password', ''))


class IPythonHandler(AuthenticatedHandler):
    """IPython-specific extensions to authenticated handling

    Mostly property shortcuts to IPython-specific settings.
    """

    @property
    def config(self):
        return self.settings.get('config', None)

    @property
    def log(self):
        """use the IPython log by default, falling back on tornado's logger"""
        if Application.initialized():
            return Application.instance().log
        else:
            return app_log

    #---------------------------------------------------------------
    # URLs
    #---------------------------------------------------------------

    @property
    def mathjax_url(self):
        return self.settings.get('mathjax_url', '')

    @property
    def base_url(self):
        return self.settings.get('base_url', '/')

    @property
    def ws_url(self):
        return self.settings.get('websocket_url', '')

    #---------------------------------------------------------------
    # Manager objects
    #---------------------------------------------------------------

    @property
    def kernel_manager(self):
        return self.settings['kernel_manager']

    @property
    def contents_manager(self):
        return self.settings['contents_manager']

    @property
    def cluster_manager(self):
        return self.settings['cluster_manager']

    @property
    def session_manager(self):
        return self.settings['session_manager']

    @property
    def kernel_spec_manager(self):
        return self.settings['kernel_spec_manager']

    @property
    def project_dir(self):
        return getattr(self.contents_manager, 'root_dir', '/')

    #---------------------------------------------------------------
    # CORS
    #---------------------------------------------------------------

    @property
    def allow_origin(self):
        """Normal Access-Control-Allow-Origin"""
        return self.settings.get('allow_origin', '')

    @property
    def allow_origin_pat(self):
        """Regular expression version of allow_origin"""
        return self.settings.get('allow_origin_pat', None)

    @property
    def allow_credentials(self):
        """Whether to set Access-Control-Allow-Credentials"""
        return self.settings.get('allow_credentials', False)

    def set_default_headers(self):
        """Add CORS headers, if defined"""
        super(IPythonHandler, self).set_default_headers()
        if self.allow_origin:
            self.set_header("Access-Control-Allow-Origin", self.allow_origin)
        elif self.allow_origin_pat:
            origin = self.get_origin()
            if origin and self.allow_origin_pat.match(origin):
                self.set_header("Access-Control-Allow-Origin", origin)
        if self.allow_credentials:
            self.set_header("Access-Control-Allow-Credentials", 'true')

    def get_origin(self):
        # Handle WebSocket Origin naming convention differences
        # The difference between version 8 and 13 is that in 8 the
        # client sends a "Sec-Websocket-Origin" header and in 13 it's
        # simply "Origin".
        if "Origin" in self.request.headers:
            origin = self.request.headers.get("Origin")
        else:
            origin = self.request.headers.get("Sec-Websocket-Origin", None)
        return origin

    #---------------------------------------------------------------
    # template rendering
    #---------------------------------------------------------------

    def get_template(self, name):
        """Return the jinja template object for a given name"""
        return self.settings['jinja2_env'].get_template(name)

    def render_template(self, name, **ns):
        ns.update(self.template_namespace)
        template = self.get_template(name)
        return template.render(**ns)

    @property
    def template_namespace(self):
        return dict(
            base_url=self.base_url,
            ws_url=self.ws_url,
            logged_in=self.logged_in,
            login_available=self.login_available,
            static_url=self.static_url,
        )

    def get_json_body(self):
        """Return the body of the request as JSON data."""
        if not self.request.body:
            return None
        # Do we need to call body.decode('utf-8') here?
        body = self.request.body.strip().decode(u'utf-8')
        try:
            model = json.loads(body)
        except Exception:
            self.log.debug("Bad JSON: %r", body)
            self.log.error("Couldn't parse JSON", exc_info=True)
            raise web.HTTPError(400, u'Invalid JSON in body of request')
        return model

    def get_error_html(self, status_code, **kwargs):
        """render custom error pages"""
        exception = kwargs.get('exception')
        message = ''
        status_message = responses.get(status_code, 'Unknown HTTP Error')
        if exception:
            # get the custom message, if defined
            try:
                message = exception.log_message % exception.args
            except Exception:
                pass

            # construct the custom reason, if defined
            reason = getattr(exception, 'reason', '')
            if reason:
                status_message = reason

        # build template namespace
        ns = dict(
            status_code=status_code,
            status_message=status_message,
            message=message,
            exception=exception,
        )

        # render the template
        try:
            html = self.render_template('%s.html' % status_code, **ns)
        except TemplateNotFound:
            self.log.debug("No template for %d", status_code)
            html = self.render_template('error.html', **ns)
        return html


class Template404(IPythonHandler):
    """Render our 404 template"""
    def prepare(self):
        raise web.HTTPError(404)


class AuthenticatedFileHandler(IPythonHandler, web.StaticFileHandler):
    """static files should only be accessible when logged in"""

    @web.authenticated
    def get(self, path):
        if os.path.splitext(path)[1] == '.ipynb':
            name = os.path.basename(path)
            self.set_header('Content-Type', 'application/json')
            self.set_header('Content-Disposition','attachment; filename="%s"' % name)

        return web.StaticFileHandler.get(self, path)

    def compute_etag(self):
        return None

    def validate_absolute_path(self, root, absolute_path):
        """Validate and return the absolute path.

        Requires tornado 3.1

        Adding to tornado's own handling, forbids the serving of hidden files.
        """
        abs_path = super(AuthenticatedFileHandler, self).validate_absolute_path(root, absolute_path)
        abs_root = os.path.abspath(root)
        if is_hidden(abs_path, abs_root):
            self.log.info("Refusing to serve hidden file, via 404 Error")
            raise web.HTTPError(404)
        return abs_path


def json_errors(method):
    """Decorate methods with this to return GitHub style JSON errors.

    This should be used on any JSON API on any handler method that can raise HTTPErrors.

    This will grab the latest HTTPError exception using sys.exc_info
    and then:

    1. Set the HTTP status code based on the HTTPError
    2. Create and return a JSON body with a message field describing
       the error in a human readable form.
    """
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        try:
            result = method(self, *args, **kwargs)
        except web.HTTPError as e:
            status = e.status_code
            message = e.log_message
            self.log.warn(message)
            self.set_status(e.status_code)
            self.finish(json.dumps(dict(message=message)))
        except Exception:
            self.log.error("Unhandled error in API request", exc_info=True)
            status = 500
            message = "Unknown server error"
            t, value, tb = sys.exc_info()
            self.set_status(status)
            tb_text = ''.join(traceback.format_exception(t, value, tb))
            reply = dict(message=message, traceback=tb_text)
            self.finish(json.dumps(reply))
        else:
            return result
    return wrapper



#-----------------------------------------------------------------------------
# File handler
#-----------------------------------------------------------------------------

# to minimize subclass changes:
HTTPError = web.HTTPError

class FileFindHandler(web.StaticFileHandler):
    """subclass of StaticFileHandler for serving files from a search path"""

    # cache search results, don't search for files more than once
    _static_paths = {}

    def initialize(self, path, default_filename=None):
        if isinstance(path, string_types):
            path = [path]

        self.root = tuple(
            os.path.abspath(os.path.expanduser(p)) + os.sep for p in path
        )
        self.default_filename = default_filename

    def compute_etag(self):
        return None

    @classmethod
    def get_absolute_path(cls, roots, path):
        """locate a file to serve on our static file search path"""
        with cls._lock:
            if path in cls._static_paths:
                return cls._static_paths[path]
            try:
                abspath = os.path.abspath(filefind(path, roots))
            except IOError:
                # IOError means not found
                return ''

            cls._static_paths[path] = abspath
            return abspath

    def validate_absolute_path(self, root, absolute_path):
        """check if the file should be served (raises 404, 403, etc.)"""
        if absolute_path == '':
            raise web.HTTPError(404)

        for root in self.root:
            if (absolute_path + os.sep).startswith(root):
                break

        return super(FileFindHandler, self).validate_absolute_path(root, absolute_path)


class TrailingSlashHandler(web.RequestHandler):
    """Simple redirect handler that strips trailing slashes

    This should be the first, highest priority handler.
    """

    SUPPORTED_METHODS = ['GET']

    def get(self):
        self.redirect(self.request.uri.rstrip('/'))

#-----------------------------------------------------------------------------
# URL pattern fragments for re-use
#-----------------------------------------------------------------------------

path_regex = r"(?P<path>(?:/.*)*)"
notebook_name_regex = r"(?P<name>[^/]+\.ipynb)"
notebook_path_regex = "%s/%s" % (path_regex, notebook_name_regex)
+file_name_regex = r"(?P<name>[^/]+)"
+file_path_regex = "%s/%s" % (path_regex, file_name_regex)

#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------


default_handlers = [
    (r".*/", TrailingSlashHandler)
]
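The only functional change in the hunk above is the addition of the file_name_regex and file_path_regex URL fragments. As an illustrative sketch (not part of the patch, assuming the definitions from the handlers module above; the handler class and the "/files" URL prefix are hypothetical names chosen for the example), this is roughly how such a fragment is combined with IPythonHandler and json_errors in a handler table:

# Hypothetical example only -- not part of the patch above.
class ExampleFileHandler(IPythonHandler):
    """Return a tiny JSON description of the file named in the URL."""

    SUPPORTED_METHODS = ('GET',)

    @web.authenticated
    @json_errors
    def get(self, path='', name=None):
        # "path" and "name" are filled from the named groups defined in
        # path_regex and file_name_regex.
        self.finish(json.dumps({'name': name, 'path': path.strip('/')}))

example_handlers = [
    (r"/files%s" % file_path_regex, ExampleFileHandler),
]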
@@ -1,137 +1,142 @@
+"""Tornado handlers for nbconvert."""
+
+# Copyright (c) IPython Development Team.
+# Distributed under the terms of the Modified BSD License.
+
import io
import os
import zipfile

from tornado import web

from ..base.handlers import IPythonHandler, notebook_path_regex
from IPython.nbformat.current import to_notebook_json

from IPython.utils.py3compat import cast_bytes

def find_resource_files(output_files_dir):
    files = []
    for dirpath, dirnames, filenames in os.walk(output_files_dir):
        files.extend([os.path.join(dirpath, f) for f in filenames])
    return files

def respond_zip(handler, name, output, resources):
    """Zip up the output and resource files and respond with the zip file.

    Returns True if it has served a zip file, False if there are no resource
    files, in which case we serve the plain output file.
    """
    # Check if we have resource files we need to zip
    output_files = resources.get('outputs', None)
    if not output_files:
        return False

    # Headers
    zip_filename = os.path.splitext(name)[0] + '.zip'
    handler.set_header('Content-Disposition',
                       'attachment; filename="%s"' % zip_filename)
    handler.set_header('Content-Type', 'application/zip')

    # Prepare the zip file
    buffer = io.BytesIO()
    zipf = zipfile.ZipFile(buffer, mode='w', compression=zipfile.ZIP_DEFLATED)
    output_filename = os.path.splitext(name)[0] + '.' + resources['output_extension']
    zipf.writestr(output_filename, cast_bytes(output, 'utf-8'))
    for filename, data in output_files.items():
        zipf.writestr(os.path.basename(filename), data)
    zipf.close()

    handler.finish(buffer.getvalue())
    return True

def get_exporter(format, **kwargs):
    """get an exporter, raising appropriate errors"""
    # if this fails, will raise 500
    try:
        from IPython.nbconvert.exporters.export import exporter_map
    except ImportError as e:
        raise web.HTTPError(500, "Could not import nbconvert: %s" % e)

    try:
        Exporter = exporter_map[format]
    except KeyError:
        # should this be 400?
        raise web.HTTPError(404, u"No exporter for format: %s" % format)

    try:
        return Exporter(**kwargs)
    except Exception as e:
        raise web.HTTPError(500, "Could not construct Exporter: %s" % e)

class NbconvertFileHandler(IPythonHandler):

    SUPPORTED_METHODS = ('GET',)

    @web.authenticated
    def get(self, format, path='', name=None):

        exporter = get_exporter(format, config=self.config, log=self.log)

        path = path.strip('/')
-        model = self.contents_manager.get(name=name, path=path)
+        model = self.contents_manager.get_model(name=name, path=path)

        self.set_header('Last-Modified', model['last_modified'])

        try:
            output, resources = exporter.from_notebook_node(model['content'])
        except Exception as e:
            raise web.HTTPError(500, "nbconvert failed: %s" % e)

        if respond_zip(self, name, output, resources):
            return

        # Force download if requested
        if self.get_argument('download', 'false').lower() == 'true':
            filename = os.path.splitext(name)[0] + '.' + resources['output_extension']
            self.set_header('Content-Disposition',
                            'attachment; filename="%s"' % filename)

        # MIME type
        if exporter.output_mimetype:
            self.set_header('Content-Type',
                            '%s; charset=utf-8' % exporter.output_mimetype)

        self.finish(output)

class NbconvertPostHandler(IPythonHandler):
    SUPPORTED_METHODS = ('POST',)

    @web.authenticated
    def post(self, format):
        exporter = get_exporter(format, config=self.config)

        model = self.get_json_body()
        nbnode = to_notebook_json(model['content'])

        try:
            output, resources = exporter.from_notebook_node(nbnode)
        except Exception as e:
            raise web.HTTPError(500, "nbconvert failed: %s" % e)

        if respond_zip(self, nbnode.metadata.name, output, resources):
            return

        # MIME type
        if exporter.output_mimetype:
            self.set_header('Content-Type',
                            '%s; charset=utf-8' % exporter.output_mimetype)

        self.finish(output)

#-----------------------------------------------------------------------------
# URL to handler mappings
#-----------------------------------------------------------------------------

_format_regex = r"(?P<format>\w+)"


default_handlers = [
    (r"/nbconvert/%s%s" % (_format_regex, notebook_path_regex),
         NbconvertFileHandler),
    (r"/nbconvert/%s" % _format_regex, NbconvertPostHandler),
]
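Both nbconvert handlers above are registered under /nbconvert/<format>; the GET route additionally captures a notebook path via notebook_path_regex and honours a ?download=true query argument. A minimal usage sketch follows (the host, port, and notebook name are assumptions for the example, and it presumes a server running without password authentication):

# Hypothetical example only -- fetch an HTML export of a notebook over HTTP.
try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2

url = "http://localhost:8888/nbconvert/html/Untitled0.ipynb?download=true"
response = urlopen(url)
# The handler sets the MIME type from the exporter and, because of
# download=true, a Content-Disposition attachment header.
print(response.info().get('Content-Type'))
print(response.info().get('Content-Disposition'))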
@@ -1,437 +1,461 b'' | |||||
1 | """A contents manager that uses the local file system for storage.""" |
|
1 | """A contents manager that uses the local file system for storage.""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
|
6 | import base64 | |||
6 | import io |
|
7 | import io | |
7 | import os |
|
8 | import os | |
8 | import glob |
|
9 | import glob | |
9 | import shutil |
|
10 | import shutil | |
10 |
|
11 | |||
11 | from tornado import web |
|
12 | from tornado import web | |
12 |
|
13 | |||
13 | from .manager import ContentsManager |
|
14 | from .manager import ContentsManager | |
14 | from IPython.nbformat import current |
|
15 | from IPython.nbformat import current | |
15 | from IPython.utils.path import ensure_dir_exists |
|
16 | from IPython.utils.path import ensure_dir_exists | |
16 | from IPython.utils.traitlets import Unicode, Bool, TraitError |
|
17 | from IPython.utils.traitlets import Unicode, Bool, TraitError | |
17 | from IPython.utils.py3compat import getcwd |
|
18 | from IPython.utils.py3compat import getcwd | |
18 | from IPython.utils import tz |
|
19 | from IPython.utils import tz | |
19 | from IPython.html.utils import is_hidden, to_os_path |
|
20 | from IPython.html.utils import is_hidden, to_os_path | |
20 |
|
21 | |||
21 | def sort_key(item): |
|
22 | def sort_key(item): | |
22 | """Case-insensitive sorting.""" |
|
23 | """Case-insensitive sorting.""" | |
23 | return item['name'].lower() |
|
24 | return item['name'].lower() | |
24 |
|
25 | |||
25 |
|
26 | |||
26 | class FileContentsManager(ContentsManager): |
|
27 | class FileContentsManager(ContentsManager): | |
27 |
|
28 | |||
28 | root_dir = Unicode(getcwd(), config=True) |
|
29 | root_dir = Unicode(getcwd(), config=True) | |
29 |
|
30 | |||
30 | def _root_dir_changed(self, name, old, new): |
|
31 | def _root_dir_changed(self, name, old, new): | |
31 | """Do a bit of validation of the root_dir.""" |
|
32 | """Do a bit of validation of the root_dir.""" | |
32 | if not os.path.isabs(new): |
|
33 | if not os.path.isabs(new): | |
33 | # If we receive a non-absolute path, make it absolute. |
|
34 | # If we receive a non-absolute path, make it absolute. | |
34 | self.root_dir = os.path.abspath(new) |
|
35 | self.root_dir = os.path.abspath(new) | |
35 | return |
|
36 | return | |
36 | if not os.path.exists(new) or not os.path.isdir(new): |
|
37 | if not os.path.exists(new) or not os.path.isdir(new): | |
37 | raise TraitError("%r is not a directory" % new) |
|
38 | raise TraitError("%r is not a directory" % new) | |
38 |
|
39 | |||
39 | checkpoint_dir = Unicode('.ipynb_checkpoints', config=True, |
|
40 | checkpoint_dir = Unicode('.ipynb_checkpoints', config=True, | |
40 | help="""The directory name in which to keep notebook checkpoints |
|
41 | help="""The directory name in which to keep notebook checkpoints | |
41 |
|
42 | |||
42 | This is a path relative to the notebook's own directory. |
|
43 | This is a path relative to the notebook's own directory. | |
43 |
|
44 | |||
44 | By default, it is .ipynb_checkpoints |
|
45 | By default, it is .ipynb_checkpoints | |
45 | """ |
|
46 | """ | |
46 | ) |
|
47 | ) | |
47 |
|
48 | |||
48 | def _copy(self, src, dest): |
|
49 | def _copy(self, src, dest): | |
49 | """copy src to dest |
|
50 | """copy src to dest | |
50 |
|
51 | |||
51 | like shutil.copy2, but log errors in copystat |
|
52 | like shutil.copy2, but log errors in copystat | |
52 | """ |
|
53 | """ | |
53 | shutil.copyfile(src, dest) |
|
54 | shutil.copyfile(src, dest) | |
54 | try: |
|
55 | try: | |
55 | shutil.copystat(src, dest) |
|
56 | shutil.copystat(src, dest) | |
56 | except OSError as e: |
|
57 | except OSError as e: | |
57 | self.log.debug("copystat on %s failed", dest, exc_info=True) |
|
58 | self.log.debug("copystat on %s failed", dest, exc_info=True) | |
58 |
|
59 | |||
59 |
def |
|
60 | def _get_os_path(self, name=None, path=''): | |
60 | """List all filenames in the path (relative to root_dir).""" |
|
61 | """Given a filename and a URL path, return its file system | |
61 |
path |
|
62 | path. | |
62 | if not os.path.isdir(self._get_os_path(path=path)): |
|
63 | ||
63 | raise web.HTTPError(404, 'Directory not found: ' + path) |
|
64 | Parameters | |
64 | names = glob.glob(self._get_os_path('*', path)) |
|
65 | ---------- | |
65 | names = [ os.path.basename(name) for name in names if os.path.isfile(name)] |
|
66 | name : string | |
66 |
|
|
67 | A filename | |
|
68 | path : string | |||
|
69 | The relative URL path (with '/' as separator) to the named | |||
|
70 | file. | |||
|
71 | ||||
|
72 | Returns | |||
|
73 | ------- | |||
|
74 | path : string | |||
|
75 | API path to be evaluated relative to root_dir. | |||
|
76 | """ | |||
|
77 | if name is not None: | |||
|
78 | path = path + '/' + name | |||
|
79 | return to_os_path(path, self.root_dir) | |||
67 |
|
80 | |||
68 | def path_exists(self, path): |
|
81 | def path_exists(self, path): | |
69 |
"""Does the API-style path |
|
82 | """Does the API-style path refer to an extant directory? | |
70 |
|
83 | |||
71 | Parameters |
|
84 | Parameters | |
72 | ---------- |
|
85 | ---------- | |
73 | path : string |
|
86 | path : string | |
74 | The path to check. This is an API path (`/` separated, |
|
87 | The path to check. This is an API path (`/` separated, | |
75 | relative to root_dir). |
|
88 | relative to root_dir). | |
76 |
|
89 | |||
77 | Returns |
|
90 | Returns | |
78 | ------- |
|
91 | ------- | |
79 | exists : bool |
|
92 | exists : bool | |
80 | Whether the path is indeed a directory. |
|
93 | Whether the path is indeed a directory. | |
81 | """ |
|
94 | """ | |
82 | path = path.strip('/') |
|
95 | path = path.strip('/') | |
83 | os_path = self._get_os_path(path=path) |
|
96 | os_path = self._get_os_path(path=path) | |
84 | return os.path.isdir(os_path) |
|
97 | return os.path.isdir(os_path) | |
85 |
|
98 | |||
86 | def is_hidden(self, path): |
|
99 | def is_hidden(self, path): | |
87 | """Does the API style path correspond to a hidden directory or file? |
|
100 | """Does the API style path correspond to a hidden directory or file? | |
88 |
|
101 | |||
89 | Parameters |
|
102 | Parameters | |
90 | ---------- |
|
103 | ---------- | |
91 | path : string |
|
104 | path : string | |
92 | The path to check. This is an API path (`/` separated, |
|
105 | The path to check. This is an API path (`/` separated, | |
93 | relative to root_dir). |
|
106 | relative to root_dir). | |
94 |
|
107 | |||
95 | Returns |
|
108 | Returns | |
96 | ------- |
|
109 | ------- | |
97 | exists : bool |
|
110 | exists : bool | |
98 | Whether the path is hidden. |
|
111 | Whether the path is hidden. | |
99 |
|
112 | |||
100 | """ |
|
113 | """ | |
101 | path = path.strip('/') |
|
114 | path = path.strip('/') | |
102 | os_path = self._get_os_path(path=path) |
|
115 | os_path = self._get_os_path(path=path) | |
103 | return is_hidden(os_path, self.root_dir) |
|
116 | return is_hidden(os_path, self.root_dir) | |
104 |
|
117 | |||
105 |
def _ |
|
118 | def file_exists(self, name, path=''): | |
106 | """Given a filename and a URL path, return its file system |
|
119 | """Returns True if the file exists, else returns False. | |
107 | path. |
|
|||
108 |
|
120 | |||
109 | Parameters |
|
121 | Parameters | |
110 | ---------- |
|
122 | ---------- | |
111 | name : string |
|
123 | name : string | |
112 | A filename |
|
124 | The name of the file you are checking. | |
113 | path : string |
|
125 | path : string | |
114 |
The relative |
|
126 | The relative path to the file's directory (with '/' as separator) | |
115 | file. |
|
|||
116 |
|
127 | |||
117 | Returns |
|
128 | Returns | |
118 | ------- |
|
129 | ------- | |
119 | path : string |
|
130 | bool | |
120 | API path to be evaluated relative to root_dir. |
|
|||
121 | """ |
|
131 | """ | |
122 | if name is not None: |
|
132 | path = path.strip('/') | |
123 | path = path + '/' + name |
|
133 | nbpath = self._get_os_path(name, path=path) | |
124 |
return |
|
134 | return os.path.isfile(nbpath) | |
125 |
|
135 | |||
126 |
def |
|
136 | def exists(self, name=None, path=''): | |
127 |
"""Returns |
|
137 | """Returns True if the path [and name] exists, else returns False. | |
128 |
|
138 | |||
129 | Parameters |
|
139 | Parameters | |
130 | ---------- |
|
140 | ---------- | |
131 | name : string |
|
141 | name : string | |
132 | The name of the file you are checking. |
|
142 | The name of the file you are checking. | |
133 | path : string |
|
143 | path : string | |
134 | The relative path to the file's directory (with '/' as separator) |
|
144 | The relative path to the file's directory (with '/' as separator) | |
135 |
|
145 | |||
136 | Returns |
|
146 | Returns | |
137 | ------- |
|
147 | ------- | |
138 | bool |
|
148 | bool | |
139 | """ |
|
149 | """ | |
140 | path = path.strip('/') |
|
150 | path = path.strip('/') | |
141 |
|
|
151 | os_path = self._get_os_path(name, path=path) | |
142 |
return os.path. |
|
152 | return os.path.exists(os_path) | |
143 |
|
153 | |||
144 | # TODO: Remove this after we create the contents web service and directories are |
|
154 | def _base_model(self, name, path=''): | |
145 | # no longer listed by the notebook web service. |
|
155 | """Build the common base of a contents model""" | |
146 | def list_dirs(self, path): |
|
|||
147 | """List the directories for a given API style path.""" |
|
|||
148 | path = path.strip('/') |
|
|||
149 | os_path = self._get_os_path('', path) |
|
|||
150 | if not os.path.isdir(os_path): |
|
|||
151 | raise web.HTTPError(404, u'directory does not exist: %r' % os_path) |
|
|||
152 | elif is_hidden(os_path, self.root_dir): |
|
|||
153 | self.log.info("Refusing to serve hidden directory, via 404 Error") |
|
|||
154 | raise web.HTTPError(404, u'directory does not exist: %r' % os_path) |
|
|||
155 | dir_names = os.listdir(os_path) |
|
|||
156 | dirs = [] |
|
|||
157 | for name in dir_names: |
|
|||
158 | os_path = self._get_os_path(name, path) |
|
|||
159 | if os.path.isdir(os_path) and not is_hidden(os_path, self.root_dir)\ |
|
|||
160 | and self.should_list(name): |
|
|||
161 | try: |
|
|||
162 | model = self.get_dir_model(name, path) |
|
|||
163 | except IOError: |
|
|||
164 | pass |
|
|||
165 | dirs.append(model) |
|
|||
166 | dirs = sorted(dirs, key=sort_key) |
|
|||
167 | return dirs |
|
|||
168 |
|
||||
169 | # TODO: Remove this after we create the contents web service and directories are |
|
|||
170 | # no longer listed by the notebook web service. |
|
|||
171 | def get_dir_model(self, name, path=''): |
|
|||
172 | """Get the directory model given a directory name and its API style path""" |
|
|||
173 | path = path.strip('/') |
|
|||
174 | os_path = self._get_os_path(name, path) |
|
156 | os_path = self._get_os_path(name, path) | |
175 | if not os.path.isdir(os_path): |
|
|||
176 | raise IOError('directory does not exist: %r' % os_path) |
|
|||
177 | info = os.stat(os_path) |
|
157 | info = os.stat(os_path) | |
178 | last_modified = tz.utcfromtimestamp(info.st_mtime) |
|
158 | last_modified = tz.utcfromtimestamp(info.st_mtime) | |
179 | created = tz.utcfromtimestamp(info.st_ctime) |
|
159 | created = tz.utcfromtimestamp(info.st_ctime) | |
180 | # Create the notebook model. |
|
160 | # Create the notebook model. | |
181 | model ={} |
|
161 | model = {} | |
182 | model['name'] = name |
|
162 | model['name'] = name | |
183 | model['path'] = path |
|
163 | model['path'] = path | |
184 | model['last_modified'] = last_modified |
|
164 | model['last_modified'] = last_modified | |
185 | model['created'] = created |
|
165 | model['created'] = created | |
|
166 | model['content'] = None | |||
|
167 | model['format'] = None | |||
|
168 | return model | |||
|
169 | ||||
|
170 | def _dir_model(self, name, path='', content=True): | |||
|
171 | """Build a model for a directory | |||
|
172 | ||||
|
173 | if content is requested, will include a listing of the directory | |||
|
174 | """ | |||
|
175 | os_path = self._get_os_path(name, path) | |||
|
176 | ||||
|
177 | if not os.path.isdir(os_path): | |||
|
178 | raise web.HTTPError(404, u'directory does not exist: %r' % os_path) | |||
|
179 | elif is_hidden(os_path, self.root_dir): | |||
|
180 | self.log.info("Refusing to serve hidden directory, via 404 Error") | |||
|
181 | raise web.HTTPError(404, u'directory does not exist: %r' % os_path) | |||
|
182 | ||||
|
183 | if name is None: | |||
|
184 | if '/' in path: | |||
|
185 | path, name = path.rsplit('/', 1) | |||
|
186 | else: | |||
|
187 | name = '' | |||
|
188 | model = self._base_model(name, path) | |||
186 | model['type'] = 'directory' |
|
189 | model['type'] = 'directory' | |
|
190 | dir_path = u'{}/{}'.format(path, name) | |||
|
191 | if content: | |||
|
192 | contents = [] | |||
|
193 | for os_path in glob.glob(self._get_os_path('*', dir_path)): | |||
|
194 | name = os.path.basename(os_path) | |||
|
195 | if self.should_list(name) and not is_hidden(os_path, self.root_dir): | |||
|
196 | contents.append(self.get_model(name=name, path=dir_path, content=False)) | |||
|
197 | ||||
|
198 | model['content'] = sorted(contents, key=sort_key) | |||
|
199 | ||||
187 | return model |
|
200 | return model | |
188 |
|
201 | |||
189 |
def |
|
202 | def _file_model(self, name, path='', content=True): | |
190 | """Returns a list of dictionaries that are the standard model |
|
203 | """Build a model for a file | |
191 | for all notebooks in the relative 'path'. |
|
|||
192 |
|
204 | |||
193 | Parameters |
|
205 | if content is requested, include the file contents. | |
194 | ---------- |
|
206 | Text files will be unicode, binary files will be base64-encoded. | |
195 | path : str |
|
207 | """ | |
196 | the URL path that describes the relative path for the |
|
208 | model = self._base_model(name, path) | |
197 | listed notebooks |
|
209 | model['type'] = 'file' | |
|
210 | if content: | |||
|
211 | os_path = self._get_os_path(name, path) | |||
|
212 | try: | |||
|
213 | with io.open(os_path, 'r', encoding='utf-8') as f: | |||
|
214 | model['content'] = f.read() | |||
|
215 | except UnicodeError as e: | |||
|
216 | with io.open(os_path, 'rb') as f: | |||
|
217 | bcontent = f.read() | |||
|
218 | model['content'] = base64.encodestring(bcontent).decode('ascii') | |||
|
219 | model['format'] = 'base64' | |||
|
220 | else: | |||
|
221 | model['format'] = 'text' | |||
|
222 | return model | |||
198 |
|
223 | |||
199 | Returns |
|
224 | ||
200 | ------- |
|
225 | def _notebook_model(self, name, path='', content=True): | |
201 | notebooks : list of dicts |
|
226 | """Build a notebook model | |
202 | a list of the notebook models without 'content' |
|
227 | ||
|
228 | if content is requested, the notebook content will be populated | |||
|
229 | as a JSON structure (not double-serialized) | |||
203 | """ |
|
230 | """ | |
204 | path = path.strip('/') |
|
231 | model = self._base_model(name, path) | |
205 | names = self.get_names(path) |
|
232 | model['type'] = 'notebook' | |
206 | notebooks = [self.get(name, path, content=False) |
|
233 | if content: | |
207 | for name in names if self.should_list(name)] |
|
234 | os_path = self._get_os_path(name, path) | |
208 | notebooks = sorted(notebooks, key=sort_key) |
|
235 | with io.open(os_path, 'r', encoding='utf-8') as f: | |
209 | return notebooks |
|
236 | try: | |
|
237 | nb = current.read(f, u'json') | |||
|
238 | except Exception as e: | |||
|
239 | raise web.HTTPError(400, u"Unreadable Notebook: %s %s" % (os_path, e)) | |||
|
240 | self.mark_trusted_cells(nb, name, path) | |||
|
241 | model['content'] = nb | |||
|
242 | model['format'] = 'json' | |||
|
243 | return model | |||
210 |
|
244 | |||
211 | def get(self, name, path='', content=True): |
|
245 | def get_model(self, name, path='', content=True): | |
212 | """ Takes a path and name for a notebook and returns its model |
|
246 | """ Takes a path and name for an entity and returns its model | |
213 |
|
247 | |||
214 | Parameters |
|
248 | Parameters | |
215 | ---------- |
|
249 | ---------- | |
216 | name : str |
|
250 | name : str | |
217 | the name of the notebook |
|
251 | the name of the target | |
218 | path : str |
|
252 | path : str | |
219 | the URL path that describes the relative path for |
|
253 | the URL path that describes the relative path for | |
220 | the notebook |
|
254 | the notebook | |
221 |
|
255 | |||
222 | Returns |
|
256 | Returns | |
223 | ------- |
|
257 | ------- | |
224 | model : dict |
|
258 | model : dict | |
225 | the |
|
259 | the contents model. If content=True, returns the contents | |
226 |
|
260 | of the file or directory as well. | |
227 | """ |
|
261 | """ | |
228 | path = path.strip('/') |
|
262 | path = path.strip('/') | |
229 | if not self.file_exists(name=name, path=path): |
|
263 | ||
230 | raise web.HTTPError(404, u'Notebook does not exist: %s' % name) |
|
264 | if not self.exists(name=name, path=path): | |
|
265 | raise web.HTTPError(404, u'No such file or directory: %s/%s' % (path, name)) | |||
|
266 | ||||
231 | os_path = self._get_os_path(name, path) |
|
267 | os_path = self._get_os_path(name, path) | |
232 | info = os.stat(os_path) |
|
268 | if os.path.isdir(os_path): | |
233 | last_modified = tz.utcfromtimestamp(info.st_mtime) |
|
269 | model = self._dir_model(name, path, content) | |
234 | created = tz.utcfromtimestamp(info.st_ctime) |
|
270 | elif name.endswith('.ipynb'): | |
235 | # Create the notebook model. |
|
271 | model = self._notebook_model(name, path, content) | |
236 |
|
272 | else: | |
237 | model['name'] = name |
|
273 | model = self._file_model(name, path, content) | |
238 | model['path'] = path |
|
|||
239 | model['last_modified'] = last_modified |
|
|||
240 | model['created'] = created |
|
|||
241 | model['type'] = 'notebook' |
|
|||
242 | if content: |
|
|||
243 | with io.open(os_path, 'r', encoding='utf-8') as f: |
|
|||
244 | try: |
|
|||
245 | nb = current.read(f, u'json') |
|
|||
246 | except Exception as e: |
|
|||
247 | raise web.HTTPError(400, u"Unreadable Notebook: %s %s" % (os_path, e)) |
|
|||
248 | self.mark_trusted_cells(nb, name, path) |
|
|||
249 | model['content'] = nb |
|
|||
250 | return model |
|
274 | return model | |
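In short, get_model dispatches on the filesystem type and the file extension alone; a minimal restatement of that rule, for illustration:

    import os

    def model_type(name, os_path):
        """Mirror the branch above: directory, notebook, or plain file."""
        if os.path.isdir(os_path):
            return 'directory'
        elif name.endswith('.ipynb'):
            return 'notebook'
        else:
            return 'file'

    # e.g. model_type('Analysis.ipynb', '/tmp/Analysis.ipynb') -> 'notebook'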
251 |
|
275 | |||
252 | def save(self, model, name='', path=''): |
|
276 | def save(self, model, name='', path=''): | |
253 | """Save the notebook model and return the model with no content.""" |
|
277 | """Save the notebook model and return the model with no content.""" | |
254 | path = path.strip('/') |
|
278 | path = path.strip('/') | |
255 |
|
279 | |||
256 | if 'content' not in model: |
|
280 | if 'content' not in model: | |
257 | raise web.HTTPError(400, u'No notebook JSON data provided') |
|
281 | raise web.HTTPError(400, u'No notebook JSON data provided') | |
258 |
|
282 | |||
259 | # One checkpoint should always exist |
|
283 | # One checkpoint should always exist | |
260 | if self.file_exists(name, path) and not self.list_checkpoints(name, path): |
|
284 | if self.file_exists(name, path) and not self.list_checkpoints(name, path): | |
261 | self.create_checkpoint(name, path) |
|
285 | self.create_checkpoint(name, path) | |
262 |
|
286 | |||
263 | new_path = model.get('path', path).strip('/') |
|
287 | new_path = model.get('path', path).strip('/') | |
264 | new_name = model.get('name', name) |
|
288 | new_name = model.get('name', name) | |
265 |
|
289 | |||
266 | if path != new_path or name != new_name: |
|
290 | if path != new_path or name != new_name: | |
267 | self.rename(name, path, new_name, new_path) |
|
291 | self.rename(name, path, new_name, new_path) | |
268 |
|
292 | |||
269 | # Save the notebook file |
|
293 | # Save the notebook file | |
270 | os_path = self._get_os_path(new_name, new_path) |
|
294 | os_path = self._get_os_path(new_name, new_path) | |
271 | nb = current.to_notebook_json(model['content']) |
|
295 | nb = current.to_notebook_json(model['content']) | |
272 |
|
296 | |||
273 | self.check_and_sign(nb, new_name, new_path) |
|
297 | self.check_and_sign(nb, new_name, new_path) | |
274 |
|
298 | |||
275 | if 'name' in nb['metadata']: |
|
299 | if 'name' in nb['metadata']: | |
276 | nb['metadata']['name'] = u'' |
|
300 | nb['metadata']['name'] = u'' | |
277 | try: |
|
301 | try: | |
278 | self.log.debug("Autosaving notebook %s", os_path) |
|
302 | self.log.debug("Autosaving notebook %s", os_path) | |
279 | with io.open(os_path, 'w', encoding='utf-8') as f: |
|
303 | with io.open(os_path, 'w', encoding='utf-8') as f: | |
280 | current.write(nb, f, u'json') |
|
304 | current.write(nb, f, u'json') | |
281 | except Exception as e: |
|
305 | except Exception as e: | |
282 | raise web.HTTPError(400, u'Unexpected error while autosaving notebook: %s %s' % (os_path, e)) |
|
306 | raise web.HTTPError(400, u'Unexpected error while autosaving notebook: %s %s' % (os_path, e)) | |
283 |
|
307 | |||
284 | model = self.get(new_name, new_path, content=False) |
|
308 | model = self.get_model(new_name, new_path, content=False) | |
285 | return model |
|
309 | return model | |
286 |
|
310 | |||
287 | def update(self, model, name, path=''): |
|
311 | def update(self, model, name, path=''): | |
288 | """Update the file's path and/or name""" |
|
312 | """Update the file's path and/or name""" | |
289 | path = path.strip('/') |
|
313 | path = path.strip('/') | |
290 | new_name = model.get('name', name) |
|
314 | new_name = model.get('name', name) | |
291 | new_path = model.get('path', path).strip('/') |
|
315 | new_path = model.get('path', path).strip('/') | |
292 | if path != new_path or name != new_name: |
|
316 | if path != new_path or name != new_name: | |
293 | self.rename(name, path, new_name, new_path) |
|
317 | self.rename(name, path, new_name, new_path) | |
294 | model = self.get(new_name, new_path, content=False) |
|
318 | model = self.get_model(new_name, new_path, content=False) | |
295 | return model |
|
319 | return model | |
296 |
|
320 | |||
297 | def delete(self, name, path=''): |
|
321 | def delete(self, name, path=''): | |
298 | """Delete file by name and path.""" |
|
322 | """Delete file by name and path.""" | |
299 | path = path.strip('/') |
|
323 | path = path.strip('/') | |
300 | os_path = self._get_os_path(name, path) |
|
324 | os_path = self._get_os_path(name, path) | |
301 | if not os.path.isfile(os_path): |
|
325 | if not os.path.isfile(os_path): | |
302 | raise web.HTTPError(404, u'File does not exist: %s' % os_path) |
|
326 | raise web.HTTPError(404, u'File does not exist: %s' % os_path) | |
303 |
|
327 | |||
304 | # clear checkpoints |
|
328 | # clear checkpoints | |
305 | for checkpoint in self.list_checkpoints(name, path): |
|
329 | for checkpoint in self.list_checkpoints(name, path): | |
306 | checkpoint_id = checkpoint['id'] |
|
330 | checkpoint_id = checkpoint['id'] | |
307 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) |
|
331 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) | |
308 | if os.path.isfile(cp_path): |
|
332 | if os.path.isfile(cp_path): | |
309 | self.log.debug("Unlinking checkpoint %s", cp_path) |
|
333 | self.log.debug("Unlinking checkpoint %s", cp_path) | |
310 | os.unlink(cp_path) |
|
334 | os.unlink(cp_path) | |
311 |
|
335 | |||
312 | self.log.debug("Unlinking file %s", os_path) |
|
336 | self.log.debug("Unlinking file %s", os_path) | |
313 | os.unlink(os_path) |
|
337 | os.unlink(os_path) | |
314 |
|
338 | |||
315 | def rename(self, old_name, old_path, new_name, new_path): |
|
339 | def rename(self, old_name, old_path, new_name, new_path): | |
316 | """Rename a file.""" |
|
340 | """Rename a file.""" | |
317 | old_path = old_path.strip('/') |
|
341 | old_path = old_path.strip('/') | |
318 | new_path = new_path.strip('/') |
|
342 | new_path = new_path.strip('/') | |
319 | if new_name == old_name and new_path == old_path: |
|
343 | if new_name == old_name and new_path == old_path: | |
320 | return |
|
344 | return | |
321 |
|
345 | |||
322 | new_os_path = self._get_os_path(new_name, new_path) |
|
346 | new_os_path = self._get_os_path(new_name, new_path) | |
323 | old_os_path = self._get_os_path(old_name, old_path) |
|
347 | old_os_path = self._get_os_path(old_name, old_path) | |
324 |
|
348 | |||
325 | # Should we proceed with the move? |
|
349 | # Should we proceed with the move? | |
326 | if os.path.isfile(new_os_path): |
|
350 | if os.path.isfile(new_os_path): | |
327 | raise web.HTTPError(409, u'Notebook with name already exists: %s' % new_os_path) |
|
351 | raise web.HTTPError(409, u'Notebook with name already exists: %s' % new_os_path) | |
328 |
|
352 | |||
329 | # Move the file |
|
353 | # Move the file | |
330 | try: |
|
354 | try: | |
331 | shutil.move(old_os_path, new_os_path) |
|
355 | shutil.move(old_os_path, new_os_path) | |
332 | except Exception as e: |
|
356 | except Exception as e: | |
333 | raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_os_path, e)) |
|
357 | raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_os_path, e)) | |
334 |
|
358 | |||
335 | # Move the checkpoints |
|
359 | # Move the checkpoints | |
336 | old_checkpoints = self.list_checkpoints(old_name, old_path) |
|
360 | old_checkpoints = self.list_checkpoints(old_name, old_path) | |
337 | for cp in old_checkpoints: |
|
361 | for cp in old_checkpoints: | |
338 | checkpoint_id = cp['id'] |
|
362 | checkpoint_id = cp['id'] | |
339 | old_cp_path = self.get_checkpoint_path(checkpoint_id, old_name, old_path) |
|
363 | old_cp_path = self.get_checkpoint_path(checkpoint_id, old_name, old_path) | |
340 | new_cp_path = self.get_checkpoint_path(checkpoint_id, new_name, new_path) |
|
364 | new_cp_path = self.get_checkpoint_path(checkpoint_id, new_name, new_path) | |
341 | if os.path.isfile(old_cp_path): |
|
365 | if os.path.isfile(old_cp_path): | |
342 | self.log.debug("Renaming checkpoint %s -> %s", old_cp_path, new_cp_path) |
|
366 | self.log.debug("Renaming checkpoint %s -> %s", old_cp_path, new_cp_path) | |
343 | shutil.move(old_cp_path, new_cp_path) |
|
367 | shutil.move(old_cp_path, new_cp_path) | |
344 |
|
368 | |||
345 | # Checkpoint-related utilities |
|
369 | # Checkpoint-related utilities | |
346 |
|
370 | |||
347 | def get_checkpoint_path(self, checkpoint_id, name, path=''): |
|
371 | def get_checkpoint_path(self, checkpoint_id, name, path=''): | |
348 | """find the path to a checkpoint""" |
|
372 | """find the path to a checkpoint""" | |
349 | path = path.strip('/') |
|
373 | path = path.strip('/') | |
350 | basename, ext = os.path.splitext(name) |
|
374 | basename, ext = os.path.splitext(name) | |
351 | filename = u"{name}-{checkpoint_id}{ext}".format( |
|
375 | filename = u"{name}-{checkpoint_id}{ext}".format( | |
352 | name=basename, |
|
376 | name=basename, | |
353 | checkpoint_id=checkpoint_id, |
|
377 | checkpoint_id=checkpoint_id, | |
354 | ext=ext, |
|
378 | ext=ext, | |
355 | ) |
|
379 | ) | |
356 | os_path = self._get_os_path(path=path) |
|
380 | os_path = self._get_os_path(path=path) | |
357 | cp_dir = os.path.join(os_path, self.checkpoint_dir) |
|
381 | cp_dir = os.path.join(os_path, self.checkpoint_dir) | |
358 | ensure_dir_exists(cp_dir) |
|
382 | ensure_dir_exists(cp_dir) | |
359 | cp_path = os.path.join(cp_dir, filename) |
|
383 | cp_path = os.path.join(cp_dir, filename) | |
360 | return cp_path |
|
384 | return cp_path | |
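The resulting layout is predictable: checkpoints live in self.checkpoint_dir inside the file's directory, with the checkpoint id spliced in before the extension. A small illustrative helper reproducing the naming scheme:

    import os

    def checkpoint_filename(name, checkpoint_id=u'checkpoint'):
        """Reproduce the filename pattern used by get_checkpoint_path."""
        basename, ext = os.path.splitext(name)
        return u"{name}-{checkpoint_id}{ext}".format(
            name=basename, checkpoint_id=checkpoint_id, ext=ext)

    print(checkpoint_filename(u'Analysis.ipynb'))   # Analysis-checkpoint.ipynb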
361 |
|
385 | |||
362 | def get_checkpoint_model(self, checkpoint_id, name, path=''): |
|
386 | def get_checkpoint_model(self, checkpoint_id, name, path=''): | |
363 | """construct the info dict for a given checkpoint""" |
|
387 | """construct the info dict for a given checkpoint""" | |
364 | path = path.strip('/') |
|
388 | path = path.strip('/') | |
365 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) |
|
389 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) | |
366 | stats = os.stat(cp_path) |
|
390 | stats = os.stat(cp_path) | |
367 | last_modified = tz.utcfromtimestamp(stats.st_mtime) |
|
391 | last_modified = tz.utcfromtimestamp(stats.st_mtime) | |
368 | info = dict( |
|
392 | info = dict( | |
369 | id = checkpoint_id, |
|
393 | id = checkpoint_id, | |
370 | last_modified = last_modified, |
|
394 | last_modified = last_modified, | |
371 | ) |
|
395 | ) | |
372 | return info |
|
396 | return info | |
373 |
|
397 | |||
374 | # public checkpoint API |
|
398 | # public checkpoint API | |
375 |
|
399 | |||
376 | def create_checkpoint(self, name, path=''): |
|
400 | def create_checkpoint(self, name, path=''): | |
377 | """Create a checkpoint from the current state of a file""" |
|
401 | """Create a checkpoint from the current state of a file""" | |
378 | path = path.strip('/') |
|
402 | path = path.strip('/') | |
379 | src_path = self._get_os_path(name, path) |
|
403 | src_path = self._get_os_path(name, path) | |
380 | # only the one checkpoint ID: |
|
404 | # only the one checkpoint ID: | |
381 | checkpoint_id = u"checkpoint" |
|
405 | checkpoint_id = u"checkpoint" | |
382 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) |
|
406 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) | |
383 | self.log.debug("creating checkpoint for notebook %s", name) |
|
407 | self.log.debug("creating checkpoint for notebook %s", name) | |
384 | self._copy(src_path, cp_path) |
|
408 | self._copy(src_path, cp_path) | |
385 |
|
409 | |||
386 | # return the checkpoint info |
|
410 | # return the checkpoint info | |
387 | return self.get_checkpoint_model(checkpoint_id, name, path) |
|
411 | return self.get_checkpoint_model(checkpoint_id, name, path) | |
388 |
|
412 | |||
389 | def list_checkpoints(self, name, path=''): |
|
413 | def list_checkpoints(self, name, path=''): | |
390 | """list the checkpoints for a given file |
|
414 | """list the checkpoints for a given file | |
391 |
|
415 | |||
392 | This contents manager currently only supports one checkpoint per file. |
|
416 | This contents manager currently only supports one checkpoint per file. | |
393 | """ |
|
417 | """ | |
394 | path = path.strip('/') |
|
418 | path = path.strip('/') | |
395 | checkpoint_id = "checkpoint" |
|
419 | checkpoint_id = "checkpoint" | |
396 | os_path = self.get_checkpoint_path(checkpoint_id, name, path) |
|
420 | os_path = self.get_checkpoint_path(checkpoint_id, name, path) | |
397 | if not os.path.exists(os_path): |
|
421 | if not os.path.exists(os_path): | |
398 | return [] |
|
422 | return [] | |
399 | else: |
|
423 | else: | |
400 | return [self.get_checkpoint_model(checkpoint_id, name, path)] |
|
424 | return [self.get_checkpoint_model(checkpoint_id, name, path)] | |
401 |
|
425 | |||
402 |
|
426 | |||
403 | def restore_checkpoint(self, checkpoint_id, name, path=''): |
|
427 | def restore_checkpoint(self, checkpoint_id, name, path=''): | |
404 | """restore a file to a checkpointed state""" |
|
428 | """restore a file to a checkpointed state""" | |
405 | path = path.strip('/') |
|
429 | path = path.strip('/') | |
406 | self.log.info("restoring %s from checkpoint %s", name, checkpoint_id) |
|
430 | self.log.info("restoring %s from checkpoint %s", name, checkpoint_id) | |
407 | nb_path = self._get_os_path(name, path) |
|
431 | nb_path = self._get_os_path(name, path) | |
408 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) |
|
432 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) | |
409 | if not os.path.isfile(cp_path): |
|
433 | if not os.path.isfile(cp_path): | |
410 | self.log.debug("checkpoint file does not exist: %s", cp_path) |
|
434 | self.log.debug("checkpoint file does not exist: %s", cp_path) | |
411 | raise web.HTTPError(404, |
|
435 | raise web.HTTPError(404, | |
412 | u'checkpoint does not exist: %s-%s' % (name, checkpoint_id) |
|
436 | u'checkpoint does not exist: %s-%s' % (name, checkpoint_id) | |
413 | ) |
|
437 | ) | |
414 | # ensure notebook is readable (never restore from an unreadable notebook) |
|
438 | # ensure notebook is readable (never restore from an unreadable notebook) | |
415 | if cp_path.endswith('.ipynb'): |
|
439 | if cp_path.endswith('.ipynb'): | |
416 | with io.open(cp_path, 'r', encoding='utf-8') as f: |
|
440 | with io.open(cp_path, 'r', encoding='utf-8') as f: | |
417 | current.read(f, u'json') |
|
441 | current.read(f, u'json') | |
418 | self._copy(cp_path, nb_path) |
|
442 | self._copy(cp_path, nb_path) | |
419 | self.log.debug("copying %s -> %s", cp_path, nb_path) |
|
443 | self.log.debug("copying %s -> %s", cp_path, nb_path) | |
420 |
|
444 | |||
421 | def delete_checkpoint(self, checkpoint_id, name, path=''): |
|
445 | def delete_checkpoint(self, checkpoint_id, name, path=''): | |
422 | """delete a file's checkpoint""" |
|
446 | """delete a file's checkpoint""" | |
423 | path = path.strip('/') |
|
447 | path = path.strip('/') | |
424 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) |
|
448 | cp_path = self.get_checkpoint_path(checkpoint_id, name, path) | |
425 | if not os.path.isfile(cp_path): |
|
449 | if not os.path.isfile(cp_path): | |
426 | raise web.HTTPError(404, |
|
450 | raise web.HTTPError(404, | |
427 | u'Checkpoint does not exist: %s%s-%s' % (path, name, checkpoint_id) |
|
451 | u'Checkpoint does not exist: %s%s-%s' % (path, name, checkpoint_id) | |
428 | ) |
|
452 | ) | |
429 | self.log.debug("unlinking %s", cp_path) |
|
453 | self.log.debug("unlinking %s", cp_path) | |
430 | os.unlink(cp_path) |
|
454 | os.unlink(cp_path) | |
431 |
|
455 | |||
432 | def info_string(self): |
|
456 | def info_string(self): | |
433 | return "Serving notebooks from local directory: %s" % self.root_dir |
|
457 | return "Serving notebooks from local directory: %s" % self.root_dir | |
434 |
|
458 | |||
435 | def get_kernel_path(self, name, path='', model=None): |
|
459 | def get_kernel_path(self, name, path='', model=None): | |
436 | """Return the initial working dir a kernel associated with a given notebook""" |
|
460 | """Return the initial working dir a kernel associated with a given notebook""" | |
437 | return os.path.join(self.root_dir, path) |
|
461 | return os.path.join(self.root_dir, path) |
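So kernels simply start in the notebook's own directory under root_dir; for example, with a hypothetical root_dir:

    import os

    root_dir = '/home/user/notebooks'            # hypothetical value of self.root_dir
    print(os.path.join(root_dir, 'projects/reports'))
    # /home/user/notebooks/projects/reports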
@@ -1,270 +1,273 b'' | |||||
1 | """Tornado handlers for the contents web service.""" |
|
1 | """Tornado handlers for the contents web service.""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | import json |
|
6 | import json | |
7 |
|
7 | |||
8 | from tornado import web |
|
8 | from tornado import web | |
9 |
|
9 | |||
10 | from IPython.html.utils import url_path_join, url_escape |
|
10 | from IPython.html.utils import url_path_join, url_escape | |
11 | from IPython.utils.jsonutil import date_default |
|
11 | from IPython.utils.jsonutil import date_default | |
12 |
|
12 | |||
13 | from IPython.html.base.handlers import (IPythonHandler, json_errors, |
|
13 | from IPython.html.base.handlers import (IPythonHandler, json_errors, | |
14 |
|
|
14 | file_path_regex, path_regex, | |
15 |
|
|
15 | file_name_regex) | |
16 |
|
16 | |||
17 |
|
17 | |||
18 | class ContentsHandler(IPythonHandler): |
|
18 | class ContentsHandler(IPythonHandler): | |
19 |
|
19 | |||
20 | SUPPORTED_METHODS = (u'GET', u'PUT', u'PATCH', u'POST', u'DELETE') |
|
20 | SUPPORTED_METHODS = (u'GET', u'PUT', u'PATCH', u'POST', u'DELETE') | |
21 |
|
21 | |||
22 | def location_url(self, name, path |
|
22 | def location_url(self, name, path): | |
23 | """Return the full URL location of a file. |
|
23 | """Return the full URL location of a file. | |
24 |
|
24 | |||
25 | Parameters |
|
25 | Parameters | |
26 | ---------- |
|
26 | ---------- | |
27 | name : unicode |
|
27 | name : unicode | |
28 | The base name of the file, such as "foo.ipynb". |
|
28 | The base name of the file, such as "foo.ipynb". | |
29 | path : unicode |
|
29 | path : unicode | |
30 | The API path of the file, such as "foo/bar". |
|
30 | The API path of the file, such as "foo/bar". | |
31 | """ |
|
31 | """ | |
32 | return url_escape(url_path_join( |
|
32 | return url_escape(url_path_join( | |
33 | self.base_url, 'api', 'contents', path, name |
|
33 | self.base_url, 'api', 'contents', path, name | |
34 | )) |
|
34 | )) | |
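For concreteness, with base_url '/' a file saved as 'baz.ipynb' under path 'foo/bar' should get the location /api/contents/foo/bar/baz.ipynb; a quick check using the same helpers imported at the top of this file (the argument values are made up):

    from IPython.html.utils import url_path_join, url_escape

    print(url_escape(url_path_join('/', 'api', 'contents', 'foo/bar', 'baz.ipynb')))
    # expected: /api/contents/foo/bar/baz.ipynb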
35 |
|
35 | |||
36 | def _finish_model(self, model, location=True): |
|
36 | def _finish_model(self, model, location=True): | |
37 | """Finish a JSON request with a model, setting relevant headers, etc.""" |
|
37 | """Finish a JSON request with a model, setting relevant headers, etc.""" | |
38 | if location: |
|
38 | if location: | |
39 | location = self.location_url(model['name'], model['path']) |
|
39 | location = self.location_url(model['name'], model['path']) | |
40 | self.set_header('Location', location) |
|
40 | self.set_header('Location', location) | |
41 | self.set_header('Last-Modified', model['last_modified']) |
|
41 | self.set_header('Last-Modified', model['last_modified']) | |
42 | self.finish(json.dumps(model, default=date_default)) |
|
42 | self.finish(json.dumps(model, default=date_default)) | |
43 |
|
43 | |||
44 | @web.authenticated |
|
44 | @web.authenticated | |
45 | @json_errors |
|
45 | @json_errors | |
46 | def get(self, path='', name=None): |
|
46 | def get(self, path='', name=None): | |
47 | """Return a file or list of files. |
|
47 | """Return a file or list of files. | |
48 |
|
48 | |||
49 | * GET with path and no filename lists files in a directory |
|
49 | * GET with path and no filename lists files in a directory | |
50 | * GET with path and filename returns file contents model |
|
50 | * GET with path and filename returns file contents model | |
51 | """ |
|
51 | """ | |
52 | cm = self.contents_manager |
|
52 | path = path or '' | |
53 | # Check to see if a filename was given |
|
53 | model = self.contents_manager.get_model(name=name, path=path) | |
54 | if name is None: |
|
54 | if model['type'] == 'directory': | |
55 | # TODO: Remove this after we create the contents web service and directories are |
|
55 | # resort listing to group directories at the top | |
56 | # no longer listed by the notebook web service. This should only handle notebooks |
|
56 | dirs = [] | |
57 | # and not directories. |
|
|||
58 | dirs = cm.list_dirs(path) |
|
|||
59 | files = [] |
|
57 | files = [] | |
60 | index = [] |
|
58 | for entry in model['content']: | |
61 | for nb in cm.list_files(path): |
|
59 | if entry['type'] == 'directory': | |
62 | if nb['name'].lower() == 'index.ipynb': |
|
60 | dirs.append(entry) | |
63 | index.append(nb) |
|
|||
64 | else: |
|
61 | else: | |
65 | files.append(nb) |
|
62 | # do we also want to group notebooks separate from files? | |
66 | files = index + dirs + files |
|
63 | files.append(entry) | |
67 | self.finish(json.dumps(files, default=date_default)) |
|
64 | model['content'] = dirs + files | |
68 | return |
|
|||
69 | # get and return notebook representation |
|
|||
70 | model = cm.get(name, path) |
|
|||
71 | self._finish_model(model, location=False) |
|
65 | self._finish_model(model, location=False) | |
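From a client, the two GET behaviours described in the docstring look roughly like this; requests is already used by the test suite later in this changeset, and the server address, directory and file names here are hypothetical.

    import requests

    base = 'http://127.0.0.1:8888'

    # GET with a path and no filename: a directory model with a 'content' listing
    listing = requests.get(base + '/api/contents/projects').json()
    print(listing['type'])                                   # 'directory'
    print([entry['name'] for entry in listing['content']])   # children, dirs first

    # GET with path and filename: the model for a single entity
    model = requests.get(base + '/api/contents/projects/Analysis.ipynb').json()
    print(model['type'])                                     # 'notebook'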
72 |
|
66 | |||
73 | @web.authenticated |
|
67 | @web.authenticated | |
74 | @json_errors |
|
68 | @json_errors | |
75 | def patch(self, path='', name=None): |
|
69 | def patch(self, path='', name=None): | |
76 | """PATCH renames a notebook without re-uploading content.""" |
|
70 | """PATCH renames a notebook without re-uploading content.""" | |
77 | cm = self.contents_manager |
|
71 | cm = self.contents_manager | |
78 | if name is None: |
|
72 | if name is None: | |
79 | raise web.HTTPError(400, u'Filename missing') |
|
73 | raise web.HTTPError(400, u'Filename missing') | |
80 | model = self.get_json_body() |
|
74 | model = self.get_json_body() | |
81 | if model is None: |
|
75 | if model is None: | |
82 | raise web.HTTPError(400, u'JSON body missing') |
|
76 | raise web.HTTPError(400, u'JSON body missing') | |
83 | model = cm.update(model, name, path) |
|
77 | model = cm.update(model, name, path) | |
84 | self._finish_model(model) |
|
78 | self._finish_model(model) | |
85 |
|
79 | |||
86 | def _copy(self, copy_from, path, copy_to=None): |
|
80 | def _copy(self, copy_from, path, copy_to=None): | |
87 | """Copy a file in path, optionally specifying the new name. |
|
81 | """Copy a file in path, optionally specifying the new name. | |
88 |
|
82 | |||
89 | Only support copying within the same directory. |
|
83 | Only support copying within the same directory. | |
90 | """ |
|
84 | """ | |
91 | self.log.info(u"Copying from %s/%s to %s/%s", |
|
85 | self.log.info(u"Copying from %s/%s to %s/%s", | |
92 | path, copy_from, |
|
86 | path, copy_from, | |
93 | path, copy_to or '', |
|
87 | path, copy_to or '', | |
94 | ) |
|
88 | ) | |
95 | model = self.contents_manager.copy(copy_from, copy_to, path) |
|
89 | model = self.contents_manager.copy(copy_from, copy_to, path) | |
96 | self.set_status(201) |
|
90 | self.set_status(201) | |
97 | self._finish_model(model) |
|
91 | self._finish_model(model) | |
98 |
|
92 | |||
99 | def _upload(self, model, path, name=None): |
|
93 | def _upload(self, model, path, name=None): | |
100 | """Upload a file |
|
94 | """Upload a file | |
101 |
|
95 | |||
102 | If name specified, create it in path/name. |
|
96 | If name specified, create it in path/name. | |
103 | """ |
|
97 | """ | |
104 | self.log.info(u"Uploading file to %s/%s", path, name or '') |
|
98 | self.log.info(u"Uploading file to %s/%s", path, name or '') | |
105 | if name: |
|
99 | if name: | |
106 | model['name'] = name |
|
100 | model['name'] = name | |
107 |
|
101 | |||
108 | model = self.contents_manager.create_notebook(model, path) |
|
102 | model = self.contents_manager.create_notebook(model, path) | |
109 | self.set_status(201) |
|
103 | self.set_status(201) | |
110 | self._finish_model(model) |
|
104 | self._finish_model(model) | |
111 |
|
105 | |||
112 | def _create_empty_notebook(self, path, name=None): |
|
106 | def _create_empty_notebook(self, path, name=None): | |
113 | """Create an empty notebook in path |
|
107 | """Create an empty notebook in path | |
114 |
|
108 | |||
115 | If name specified, create it in path/name. |
|
109 | If name specified, create it in path/name. | |
116 | """ |
|
110 | """ | |
117 | self.log.info(u"Creating new notebook in %s/%s", path, name or '') |
|
111 | self.log.info(u"Creating new notebook in %s/%s", path, name or '') | |
118 | model = {} |
|
112 | model = {} | |
119 | if name: |
|
113 | if name: | |
120 | model['name'] = name |
|
114 | model['name'] = name | |
121 | model = self.contents_manager.create_notebook(model, path=path) |
|
115 | model = self.contents_manager.create_notebook(model, path=path) | |
122 | self.set_status(201) |
|
116 | self.set_status(201) | |
123 | self._finish_model(model) |
|
117 | self._finish_model(model) | |
124 |
|
118 | |||
125 | def _save(self, model, path, name): |
|
119 | def _save(self, model, path, name): | |
126 | """Save an existing file.""" |
|
120 | """Save an existing file.""" | |
127 | self.log.info(u"Saving file at %s/%s", path, name) |
|
121 | self.log.info(u"Saving file at %s/%s", path, name) | |
128 | model = self.contents_manager.save(model, name, path) |
|
122 | model = self.contents_manager.save(model, name, path) | |
129 | if model['path'] != path.strip('/') or model['name'] != name: |
|
123 | if model['path'] != path.strip('/') or model['name'] != name: | |
130 | # a rename happened, set Location header |
|
124 | # a rename happened, set Location header | |
131 | location = True |
|
125 | location = True | |
132 | else: |
|
126 | else: | |
133 | location = False |
|
127 | location = False | |
134 | self._finish_model(model, location) |
|
128 | self._finish_model(model, location) | |
135 |
|
129 | |||
136 | @web.authenticated |
|
130 | @web.authenticated | |
137 | @json_errors |
|
131 | @json_errors | |
138 | def post(self, path='', name=None): |
|
132 | def post(self, path='', name=None): | |
139 | """Create a new notebook in the specified path. |
|
133 | """Create a new notebook in the specified path. | |
140 |
|
134 | |||
141 | POST creates new notebooks. The server always decides on the notebook name. |
|
135 | POST creates new notebooks. The server always decides on the notebook name. | |
142 |
|
136 | |||
143 | POST /api/contents/path |
|
137 | POST /api/contents/path | |
144 | New untitled notebook in path. If content specified, upload a |
|
138 | New untitled notebook in path. If content specified, upload a | |
145 | notebook, otherwise start empty. |
|
139 | notebook, otherwise start empty. | |
146 | POST /api/contents/path?copy=OtherNotebook.ipynb |
|
140 | POST /api/contents/path?copy=OtherNotebook.ipynb | |
147 | New copy of OtherNotebook in path |
|
141 | New copy of OtherNotebook in path | |
148 | """ |
|
142 | """ | |
149 |
|
143 | |||
150 | if name is not None: |
|
144 | if name is not None: | |
|
145 | path = u'{}/{}'.format(path, name) | |||
|
146 | ||||
|
147 | cm = self.contents_manager | |||
|
148 | ||||
|
149 | if cm.file_exists(path): | |||
151 | raise web.HTTPError(400, "Only POST to directories. Use PUT for full names.") |
|
150 | raise web.HTTPError(400, "Only POST to directories. Use PUT for full names.") | |
152 |
|
151 | |||
|
152 | if not cm.path_exists(path): | |||
|
153 | raise web.HTTPError(404, "No such directory: %s" % path) | |||
|
154 | ||||
153 | model = self.get_json_body() |
|
155 | model = self.get_json_body() | |
154 |
|
156 | |||
155 | if model is not None: |
|
157 | if model is not None: | |
156 | copy_from = model.get('copy_from') |
|
158 | copy_from = model.get('copy_from') | |
157 | if copy_from: |
|
159 | if copy_from: | |
158 | if model.get('content'): |
|
160 | if model.get('content'): | |
159 | raise web.HTTPError(400, "Can't upload and copy at the same time.") |
|
161 | raise web.HTTPError(400, "Can't upload and copy at the same time.") | |
160 | self._copy(copy_from, path) |
|
162 | self._copy(copy_from, path) | |
161 | else: |
|
163 | else: | |
162 | self._upload(model, path) |
|
164 | self._upload(model, path) | |
163 | else: |
|
165 | else: | |
164 | self._create_empty_notebook(path) |
|
166 | self._create_empty_notebook(path) | |
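A client-side sketch of the two POST cases from the docstring, again with a hypothetical server and file names; note that the copy is requested through a 'copy_from' key in the JSON body, which is what the handler code above actually reads, rather than the ?copy= query form the docstring mentions.

    import json
    import requests

    base = 'http://127.0.0.1:8888'

    # New untitled notebook in 'projects'; the server picks the name (201 on success)
    r = requests.post(base + '/api/contents/projects')
    print(r.json()['name'])        # e.g. Untitled0.ipynb

    # Copy an existing notebook into the same directory
    r = requests.post(base + '/api/contents/projects',
                      data=json.dumps({'copy_from': 'Analysis.ipynb'}))
    print(r.json()['name'])        # e.g. Analysis-Copy0.ipynb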
165 |
|
167 | |||
166 | @web.authenticated |
|
168 | @web.authenticated | |
167 | @json_errors |
|
169 | @json_errors | |
168 | def put(self, path='', name=None): |
|
170 | def put(self, path='', name=None): | |
169 | """Saves the file in the location specified by name and path. |
|
171 | """Saves the file in the location specified by name and path. | |
170 |
|
172 | |||
171 | PUT is very similar to POST, but the requester specifies the name, |
|
173 | PUT is very similar to POST, but the requester specifies the name, | |
172 | whereas with POST, the server picks the name. |
|
174 | whereas with POST, the server picks the name. | |
173 |
|
175 | |||
174 | PUT /api/contents/path/Name.ipynb |
|
176 | PUT /api/contents/path/Name.ipynb | |
175 | Save notebook at ``path/Name.ipynb``. Notebook structure is specified |
|
177 | Save notebook at ``path/Name.ipynb``. Notebook structure is specified | |
176 | in `content` key of JSON request body. If content is not specified, |
|
178 | in `content` key of JSON request body. If content is not specified, | |
177 | create a new empty notebook. |
|
179 | create a new empty notebook. | |
178 | PUT /api/contents/path/Name.ipynb?copy=OtherNotebook.ipynb |
|
180 | PUT /api/contents/path/Name.ipynb?copy=OtherNotebook.ipynb | |
179 | Copy OtherNotebook to Name |
|
181 | Copy OtherNotebook to Name | |
180 | """ |
|
182 | """ | |
181 | if name is None: |
|
183 | if name is None: | |
182 | raise web.HTTPError(400, "Only PUT to full names. Use POST for directories.") |
|
184 | raise web.HTTPError(400, "Only PUT to full names. Use POST for directories.") | |
183 |
|
185 | |||
184 | model = self.get_json_body() |
|
186 | model = self.get_json_body() | |
185 | if model: |
|
187 | if model: | |
186 | copy_from = model.get('copy_from') |
|
188 | copy_from = model.get('copy_from') | |
187 | if copy_from: |
|
189 | if copy_from: | |
188 | if model.get('content'): |
|
190 | if model.get('content'): | |
189 | raise web.HTTPError(400, "Can't upload and copy at the same time.") |
|
191 | raise web.HTTPError(400, "Can't upload and copy at the same time.") | |
190 | self._copy(copy_from, path, name) |
|
192 | self._copy(copy_from, path, name) | |
191 | elif self.contents_manager.file_exists(name, path): |
|
193 | elif self.contents_manager.file_exists(name, path): | |
192 | self._save(model, path, name) |
|
194 | self._save(model, path, name) | |
193 | else: |
|
195 | else: | |
194 | self._upload(model, path, name) |
|
196 | self._upload(model, path, name) | |
195 | else: |
|
197 | else: | |
196 | self._create_empty_notebook(path, name) |
|
198 | self._create_empty_notebook(path, name) | |
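And the PUT counterpart, where the client chooses the name; with no request body the handler falls through to _create_empty_notebook. Server address and names are made up.

    import requests

    base = 'http://127.0.0.1:8888'

    # Create an empty notebook at an explicit location
    r = requests.put(base + '/api/contents/projects/Report.ipynb')
    print(r.status_code)    # 201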
197 |
|
199 | |||
198 | @web.authenticated |
|
200 | @web.authenticated | |
199 | @json_errors |
|
201 | @json_errors | |
200 | def delete(self, path='', name=None): |
|
202 | def delete(self, path='', name=None): | |
201 | """delete a file in the given path""" |
|
203 | """delete a file in the given path""" | |
202 | cm = self.contents_manager |
|
204 | cm = self.contents_manager | |
|
205 | self.log.warn('delete %s:%s', path, name) | |||
203 | cm.delete(name, path) |
|
206 | cm.delete(name, path) | |
204 | self.set_status(204) |
|
207 | self.set_status(204) | |
205 | self.finish() |
|
208 | self.finish() | |
206 |
|
209 | |||
207 |
|
210 | |||
208 | class CheckpointsHandler(IPythonHandler): |
|
211 | class CheckpointsHandler(IPythonHandler): | |
209 |
|
212 | |||
210 | SUPPORTED_METHODS = ('GET', 'POST') |
|
213 | SUPPORTED_METHODS = ('GET', 'POST') | |
211 |
|
214 | |||
212 | @web.authenticated |
|
215 | @web.authenticated | |
213 | @json_errors |
|
216 | @json_errors | |
214 | def get(self, path='', name=None): |
|
217 | def get(self, path='', name=None): | |
215 | """get lists checkpoints for a file""" |
|
218 | """get lists checkpoints for a file""" | |
216 | cm = self.contents_manager |
|
219 | cm = self.contents_manager | |
217 | checkpoints = cm.list_checkpoints(name, path) |
|
220 | checkpoints = cm.list_checkpoints(name, path) | |
218 | data = json.dumps(checkpoints, default=date_default) |
|
221 | data = json.dumps(checkpoints, default=date_default) | |
219 | self.finish(data) |
|
222 | self.finish(data) | |
220 |
|
223 | |||
221 | @web.authenticated |
|
224 | @web.authenticated | |
222 | @json_errors |
|
225 | @json_errors | |
223 | def post(self, path='', name=None): |
|
226 | def post(self, path='', name=None): | |
224 | """post creates a new checkpoint""" |
|
227 | """post creates a new checkpoint""" | |
225 | cm = self.contents_manager |
|
228 | cm = self.contents_manager | |
226 | checkpoint = cm.create_checkpoint(name, path) |
|
229 | checkpoint = cm.create_checkpoint(name, path) | |
227 | data = json.dumps(checkpoint, default=date_default) |
|
230 | data = json.dumps(checkpoint, default=date_default) | |
228 | location = url_path_join(self.base_url, 'api/contents', |
|
231 | location = url_path_join(self.base_url, 'api/contents', | |
229 | path, name, 'checkpoints', checkpoint['id']) |
|
232 | path, name, 'checkpoints', checkpoint['id']) | |
230 | self.set_header('Location', url_escape(location)) |
|
233 | self.set_header('Location', url_escape(location)) | |
231 | self.set_status(201) |
|
234 | self.set_status(201) | |
232 | self.finish(data) |
|
235 | self.finish(data) | |
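Together with the ModifyCheckpointsHandler defined below, a typical checkpoint round trip from the client side looks like this sketch (hypothetical server and notebook; with the file-based manager above the only checkpoint id is 'checkpoint'):

    import requests

    base = 'http://127.0.0.1:8888'
    nb = base + '/api/contents/projects/Analysis.ipynb'

    requests.post(nb + '/checkpoints')                    # create   -> 201
    print(requests.get(nb + '/checkpoints').json())       # list     -> [{'id': 'checkpoint', ...}]
    requests.post(nb + '/checkpoints/checkpoint')         # restore  -> 204
    requests.delete(nb + '/checkpoints/checkpoint')       # delete   -> 204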
233 |
|
236 | |||
234 |
|
237 | |||
235 | class ModifyCheckpointsHandler(IPythonHandler): |
|
238 | class ModifyCheckpointsHandler(IPythonHandler): | |
236 |
|
239 | |||
237 | SUPPORTED_METHODS = ('POST', 'DELETE') |
|
240 | SUPPORTED_METHODS = ('POST', 'DELETE') | |
238 |
|
241 | |||
239 | @web.authenticated |
|
242 | @web.authenticated | |
240 | @json_errors |
|
243 | @json_errors | |
241 | def post(self, path, name, checkpoint_id): |
|
244 | def post(self, path, name, checkpoint_id): | |
242 | """post restores a file from a checkpoint""" |
|
245 | """post restores a file from a checkpoint""" | |
243 | cm = self.contents_manager |
|
246 | cm = self.contents_manager | |
244 | cm.restore_checkpoint(checkpoint_id, name, path) |
|
247 | cm.restore_checkpoint(checkpoint_id, name, path) | |
245 | self.set_status(204) |
|
248 | self.set_status(204) | |
246 | self.finish() |
|
249 | self.finish() | |
247 |
|
250 | |||
248 | @web.authenticated |
|
251 | @web.authenticated | |
249 | @json_errors |
|
252 | @json_errors | |
250 | def delete(self, path, name, checkpoint_id): |
|
253 | def delete(self, path, name, checkpoint_id): | |
251 | """delete clears a checkpoint for a given file""" |
|
254 | """delete clears a checkpoint for a given file""" | |
252 | cm = self.contents_manager |
|
255 | cm = self.contents_manager | |
253 | cm.delete_checkpoint(checkpoint_id, name, path) |
|
256 | cm.delete_checkpoint(checkpoint_id, name, path) | |
254 | self.set_status(204) |
|
257 | self.set_status(204) | |
255 | self.finish() |
|
258 | self.finish() | |
256 |
|
259 | |||
257 | #----------------------------------------------------------------------------- |
|
260 | #----------------------------------------------------------------------------- | |
258 | # URL to handler mappings |
|
261 | # URL to handler mappings | |
259 | #----------------------------------------------------------------------------- |
|
262 | #----------------------------------------------------------------------------- | |
260 |
|
263 | |||
261 |
|
264 | |||
262 | _checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)" |
|
265 | _checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)" | |
263 |
|
266 | |||
264 | default_handlers = [ |
|
267 | default_handlers = [ | |
265 | (r"/api/contents%s/checkpoints" % |
|
268 | (r"/api/contents%s/checkpoints" % file_path_regex, CheckpointsHandler), | |
266 | (r"/api/contents%s/checkpoints/%s" % ( |
|
269 | (r"/api/contents%s/checkpoints/%s" % (file_path_regex, _checkpoint_id_regex), | |
267 | ModifyCheckpointsHandler), |
|
270 | ModifyCheckpointsHandler), | |
268 | (r"/api/contents%s" % |
|
271 | (r"/api/contents%s" % file_path_regex, ContentsHandler), | |
269 | (r"/api/contents%s" % path_regex, ContentsHandler), |
|
272 | (r"/api/contents%s" % path_regex, ContentsHandler), | |
270 | ] |
|
273 | ] |
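To see how these patterns carve a request URL into handler arguments, here is a self-contained re-creation; path_regex, file_name_regex and file_path_regex are imported from IPython.html.base.handlers and are not shown in this changeset, so the definitions below are stand-ins for illustration only (the checkpoint id pattern is the one defined above).

    import re

    # Hypothetical stand-ins for the imported regexes
    path_regex = r"(?P<path>(?:/.*)*)"
    file_name_regex = r"(?P<name>[^/]+)"
    file_path_regex = "%s/%s" % (path_regex, file_name_regex)
    _checkpoint_id_regex = r"(?P<checkpoint_id>[\w-]+)"

    pattern = re.compile(r"/api/contents%s/checkpoints/%s$"
                         % (file_path_regex, _checkpoint_id_regex))
    match = pattern.match("/api/contents/projects/Analysis.ipynb/checkpoints/checkpoint")
    print(match.groupdict())
    # {'path': '/projects', 'name': 'Analysis.ipynb', 'checkpoint_id': 'checkpoint'}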
@@ -1,267 +1,247 b'' | |||||
1 | """A base class for contents managers.""" |
|
1 | """A base class for contents managers.""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | from fnmatch import fnmatch |
|
6 | from fnmatch import fnmatch | |
7 | import itertools |
|
7 | import itertools | |
8 | import os |
|
8 | import os | |
9 |
|
9 | |||
10 | from IPython.config.configurable import LoggingConfigurable |
|
10 | from IPython.config.configurable import LoggingConfigurable | |
11 | from IPython.nbformat import current, sign |
|
11 | from IPython.nbformat import current, sign | |
12 | from IPython.utils.traitlets import Instance, Unicode, List |
|
12 | from IPython.utils.traitlets import Instance, Unicode, List | |
13 |
|
13 | |||
14 |
|
14 | |||
15 | class ContentsManager(LoggingConfigurable): |
|
15 | class ContentsManager(LoggingConfigurable): | |
16 |
|
16 | |||
17 | notary = Instance(sign.NotebookNotary) |
|
17 | notary = Instance(sign.NotebookNotary) | |
18 | def _notary_default(self): |
|
18 | def _notary_default(self): | |
19 | return sign.NotebookNotary(parent=self) |
|
19 | return sign.NotebookNotary(parent=self) | |
20 |
|
20 | |||
21 | hide_globs = List(Unicode, [u'__pycache__'], config=True, help=""" |
|
21 | hide_globs = List(Unicode, [u'__pycache__'], config=True, help=""" | |
22 | Glob patterns to hide in file and directory listings. |
|
22 | Glob patterns to hide in file and directory listings. | |
23 | """) |
|
23 | """) | |
24 |
|
24 | |||
25 | # ContentsManager API part 1: methods that must be |
|
25 | # ContentsManager API part 1: methods that must be | |
26 | # implemented in subclasses. |
|
26 | # implemented in subclasses. | |
27 |
|
27 | |||
28 | def path_exists(self, path): |
|
28 | def path_exists(self, path): | |
29 | """Does the API-style path (directory) actually exist? |
|
29 | """Does the API-style path (directory) actually exist? | |
30 |
|
30 | |||
31 | Override this method in subclasses. |
|
31 | Override this method in subclasses. | |
32 |
|
32 | |||
33 | Parameters |
|
33 | Parameters | |
34 | ---------- |
|
34 | ---------- | |
35 | path : string |
|
35 | path : string | |
36 | The path to check |
|
36 | The path to check | |
37 |
|
37 | |||
38 | Returns |
|
38 | Returns | |
39 | ------- |
|
39 | ------- | |
40 | exists : bool |
|
40 | exists : bool | |
41 | Whether the path does indeed exist. |
|
41 | Whether the path does indeed exist. | |
42 | """ |
|
42 | """ | |
43 | raise NotImplementedError |
|
43 | raise NotImplementedError | |
44 |
|
44 | |||
45 | def is_hidden(self, path): |
|
45 | def is_hidden(self, path): | |
46 | """Does the API style path correspond to a hidden directory or file? |
|
46 | """Does the API style path correspond to a hidden directory or file? | |
47 |
|
47 | |||
48 | Parameters |
|
48 | Parameters | |
49 | ---------- |
|
49 | ---------- | |
50 | path : string |
|
50 | path : string | |
51 | The path to check. This is an API path (`/` separated, |
|
51 | The path to check. This is an API path (`/` separated, | |
52 | relative to root dir). |
|
52 | relative to root dir). | |
53 |
|
53 | |||
54 | Returns |
|
54 | Returns | |
55 | ------- |
|
55 | ------- | |
56 | exists : bool |
|
56 | exists : bool | |
57 | Whether the path is hidden. |
|
57 | Whether the path is hidden. | |
58 |
|
58 | |||
59 | """ |
|
59 | """ | |
60 | raise NotImplementedError |
|
60 | raise NotImplementedError | |
61 |
|
61 | |||
62 | def file_exists(self, name, path=''): |
|
62 | def file_exists(self, name, path=''): | |
63 | """Returns a True if the notebook exists. Else, returns False. |
|
63 | """Returns a True if the notebook exists. Else, returns False. | |
64 |
|
64 | |||
65 | Parameters |
|
65 | Parameters | |
66 | ---------- |
|
66 | ---------- | |
67 | name : string |
|
67 | name : string | |
68 | The name of the notebook you are checking. |
|
68 | The name of the notebook you are checking. | |
69 | path : string |
|
69 | path : string | |
70 | The relative path to the notebook (with '/' as separator) |
|
70 | The relative path to the notebook (with '/' as separator) | |
71 |
|
71 | |||
72 | Returns |
|
72 | Returns | |
73 | ------- |
|
73 | ------- | |
74 | bool |
|
74 | bool | |
75 | """ |
|
75 | """ | |
76 | raise NotImplementedError('must be implemented in a subclass') |
|
76 | raise NotImplementedError('must be implemented in a subclass') | |
77 |
|
77 | |||
78 | # TODO: Remove this after we create the contents web service and directories are |
|
78 | def list(self, path=''): | |
79 | # no longer listed by the notebook web service. |
|
|||
80 | def list_dirs(self, path): |
|
|||
81 | """List the directory models for a given API style path.""" |
|
|||
82 | raise NotImplementedError('must be implemented in a subclass') |
|
|||
83 |
|
||||
84 | # TODO: Remove this after we create the contents web service and directories are |
|
|||
85 | # no longer listed by the notebook web service. |
|
|||
86 | def get_dir_model(self, name, path=''): |
|
|||
87 | """Get the directory model given a directory name and its API style path. |
|
|||
88 |
|
||||
89 | The keys in the model should be: |
|
|||
90 | * name |
|
|||
91 | * path |
|
|||
92 | * last_modified |
|
|||
93 | * created |
|
|||
94 | * type='directory' |
|
|||
95 | """ |
|
|||
96 | raise NotImplementedError('must be implemented in a subclass') |
|
|||
97 |
|
||||
98 | def list_files(self, path=''): |
|
|||
99 | """Return a list of contents dicts without content. |
|
79 | """Return a list of contents dicts without content. | |
100 |
|
80 | |||
101 | This returns a list of dicts |
|
81 | This returns a list of dicts | |
102 |
|
82 | |||
103 | This list of dicts should be sorted by name:: |
|
83 | This list of dicts should be sorted by name:: | |
104 |
|
84 | |||
105 | data = sorted(data, key=lambda item: item['name']) |
|
85 | data = sorted(data, key=lambda item: item['name']) | |
106 | """ |
|
86 | """ | |
107 | raise NotImplementedError('must be implemented in a subclass') |
|
87 | raise NotImplementedError('must be implemented in a subclass') | |
108 |
|
88 | |||
109 | def get_model(self, name, path='', content=True): |
|
89 | def get_model(self, name, path='', content=True): | |
110 | """Get the notebook model with or without content.""" |
|
90 | """Get the notebook model with or without content.""" | |
111 | raise NotImplementedError('must be implemented in a subclass') |
|
91 | raise NotImplementedError('must be implemented in a subclass') | |
112 |
|
92 | |||
113 | def save(self, model, name, path=''): |
|
93 | def save(self, model, name, path=''): | |
114 | """Save the notebook and return the model with no content.""" |
|
94 | """Save the notebook and return the model with no content.""" | |
115 | raise NotImplementedError('must be implemented in a subclass') |
|
95 | raise NotImplementedError('must be implemented in a subclass') | |
116 |
|
96 | |||
117 | def update(self, model, name, path=''): |
|
97 | def update(self, model, name, path=''): | |
118 | """Update the notebook and return the model with no content.""" |
|
98 | """Update the notebook and return the model with no content.""" | |
119 | raise NotImplementedError('must be implemented in a subclass') |
|
99 | raise NotImplementedError('must be implemented in a subclass') | |
120 |
|
100 | |||
121 | def delete(self, name, path=''): |
|
101 | def delete(self, name, path=''): | |
122 | """Delete notebook by name and path.""" |
|
102 | """Delete notebook by name and path.""" | |
123 | raise NotImplementedError('must be implemented in a subclass') |
|
103 | raise NotImplementedError('must be implemented in a subclass') | |
124 |
|
104 | |||
125 | def create_checkpoint(self, name, path=''): |
|
105 | def create_checkpoint(self, name, path=''): | |
126 | """Create a checkpoint of the current state of a notebook |
|
106 | """Create a checkpoint of the current state of a notebook | |
127 |
|
107 | |||
128 | Returns a checkpoint_id for the new checkpoint. |
|
108 | Returns a checkpoint_id for the new checkpoint. | |
129 | """ |
|
109 | """ | |
130 | raise NotImplementedError("must be implemented in a subclass") |
|
110 | raise NotImplementedError("must be implemented in a subclass") | |
131 |
|
111 | |||
132 | def list_checkpoints(self, name, path=''): |
|
112 | def list_checkpoints(self, name, path=''): | |
133 | """Return a list of checkpoints for a given notebook""" |
|
113 | """Return a list of checkpoints for a given notebook""" | |
134 | return [] |
|
114 | return [] | |
135 |
|
115 | |||
136 | def restore_checkpoint(self, checkpoint_id, name, path=''): |
|
116 | def restore_checkpoint(self, checkpoint_id, name, path=''): | |
137 | """Restore a notebook from one of its checkpoints""" |
|
117 | """Restore a notebook from one of its checkpoints""" | |
138 | raise NotImplementedError("must be implemented in a subclass") |
|
118 | raise NotImplementedError("must be implemented in a subclass") | |
139 |
|
119 | |||
140 | def delete_checkpoint(self, checkpoint_id, name, path=''): |
|
120 | def delete_checkpoint(self, checkpoint_id, name, path=''): | |
141 | """delete a checkpoint for a notebook""" |
|
121 | """delete a checkpoint for a notebook""" | |
142 | raise NotImplementedError("must be implemented in a subclass") |
|
122 | raise NotImplementedError("must be implemented in a subclass") | |
143 |
|
123 | |||
144 | def info_string(self): |
|
124 | def info_string(self): | |
145 | return "Serving notebooks" |
|
125 | return "Serving notebooks" | |
146 |
|
126 | |||
147 | # ContentsManager API part 2: methods that have useable default |
|
127 | # ContentsManager API part 2: methods that have useable default | |
148 | # implementations, but can be overridden in subclasses. |
|
128 | # implementations, but can be overridden in subclasses. | |
149 |
|
129 | |||
150 | def get_kernel_path(self, name, path='', model=None): |
|
130 | def get_kernel_path(self, name, path='', model=None): | |
151 | """ Return the path to start kernel in """ |
|
131 | """ Return the path to start kernel in """ | |
152 | return path |
|
132 | return path | |
153 |
|
133 | |||
154 | def increment_filename(self, filename, path=''): |
|
134 | def increment_filename(self, filename, path=''): | |
155 | """Increment a filename until it is unique. |
|
135 | """Increment a filename until it is unique. | |
156 |
|
136 | |||
157 | Parameters |
|
137 | Parameters | |
158 | ---------- |
|
138 | ---------- | |
159 | filename : unicode |
|
139 | filename : unicode | |
160 | The name of a file, including extension |
|
140 | The name of a file, including extension | |
161 | path : unicode |
|
141 | path : unicode | |
162 | The URL path of the notebooks directory |
|
142 | The URL path of the notebooks directory | |
163 |
|
143 | |||
164 | Returns |
|
144 | Returns | |
165 | ------- |
|
145 | ------- | |
166 | name : unicode |
|
146 | name : unicode | |
167 | A filename that is unique, based on the input filename. |
|
147 | A filename that is unique, based on the input filename. | |
168 | """ |
|
148 | """ | |
169 | path = path.strip('/') |
|
149 | path = path.strip('/') | |
170 | basename, ext = os.path.splitext(filename) |
|
150 | basename, ext = os.path.splitext(filename) | |
171 | for i in itertools.count(): |
|
151 | for i in itertools.count(): | |
172 | name = u'{basename}{i}{ext}'.format(basename=basename, i=i, |
|
152 | name = u'{basename}{i}{ext}'.format(basename=basename, i=i, | |
173 | ext=ext) |
|
153 | ext=ext) | |
174 | if not self.file_exists(name, path): |
|
154 | if not self.file_exists(name, path): | |
175 | break |
|
155 | break | |
176 | return name |
|
156 | return name | |
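The counting scheme is easiest to see in isolation; this sketch swaps file_exists() for a plain set so it runs standalone.

    import itertools
    import os

    def increment_filename(filename, existing):
        """Same scheme as above, checked against a set instead of file_exists()."""
        basename, ext = os.path.splitext(filename)
        for i in itertools.count():
            name = u'{0}{1}{2}'.format(basename, i, ext)
            if name not in existing:
                return name

    print(increment_filename('Untitled.ipynb', {'Untitled0.ipynb', 'Untitled1.ipynb'}))
    # Untitled2.ipynb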
177 |
|
157 | |||
178 | def create_notebook(self, model=None, path=''): |
|
158 | def create_notebook(self, model=None, path=''): | |
179 | """Create a new notebook and return its model with no content.""" |
|
159 | """Create a new notebook and return its model with no content.""" | |
180 | path = path.strip('/') |
|
160 | path = path.strip('/') | |
181 | if model is None: |
|
161 | if model is None: | |
182 | model = {} |
|
162 | model = {} | |
183 | if 'content' not in model: |
|
163 | if 'content' not in model: | |
184 | metadata = current.new_metadata(name=u'') |
|
164 | metadata = current.new_metadata(name=u'') | |
185 | model['content'] = current.new_notebook(metadata=metadata) |
|
165 | model['content'] = current.new_notebook(metadata=metadata) | |
186 | if 'name' not in model: |
|
166 | if 'name' not in model: | |
187 | model['name'] = self.increment_filename('Untitled.ipynb', path) |
|
167 | model['name'] = self.increment_filename('Untitled.ipynb', path) | |
188 |
|
168 | |||
189 | model['path'] = path |
|
169 | model['path'] = path | |
190 | model = self.save(model, model['name'], model['path']) |
|
170 | model = self.save(model, model['name'], model['path']) | |
191 | return model |
|
171 | return model | |
192 |
|
172 | |||
193 | def copy(self, from_name, to_name=None, path=''): |
|
173 | def copy(self, from_name, to_name=None, path=''): | |
194 | """Copy an existing file and return its new model. |
|
174 | """Copy an existing file and return its new model. | |
195 |
|
175 | |||
196 | If to_name not specified, increment `from_name-Copy#.ipynb`. |
|
176 | If to_name not specified, increment `from_name-Copy#.ipynb`. | |
197 | """ |
|
177 | """ | |
198 | path = path.strip('/') |
|
178 | path = path.strip('/') | |
199 | model = self.get(from_name, path) |
|
179 | model = self.get_model(from_name, path) | |
200 | if not to_name: |
|
180 | if not to_name: | |
201 | base, ext = os.path.splitext(from_name) |
|
181 | base, ext = os.path.splitext(from_name) | |
202 | copy_name = u'{0}-Copy{1}'.format(base, ext) |
|
182 | copy_name = u'{0}-Copy{1}'.format(base, ext) | |
203 | to_name = self.increment_filename(copy_name, path) |
|
183 | to_name = self.increment_filename(copy_name, path) | |
204 | model['name'] = to_name |
|
184 | model['name'] = to_name | |
205 | model = self.save(model, to_name, path) |
|
185 | model = self.save(model, to_name, path) | |
206 | return model |
|
186 | return model | |
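So the default copy target for 'Analysis.ipynb' is 'Analysis-Copy.ipynb', which increment_filename then turns into 'Analysis-Copy0.ipynb', 'Analysis-Copy1.ipynb', and so on until the name is free; the file name is only an example.

    import os

    base, ext = os.path.splitext('Analysis.ipynb')
    print(u'{0}-Copy{1}'.format(base, ext))      # Analysis-Copy.ipynb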
207 |
|
187 | |||
208 | def log_info(self): |
|
188 | def log_info(self): | |
209 | self.log.info(self.info_string()) |
|
189 | self.log.info(self.info_string()) | |
210 |
|
190 | |||
211 | def trust_notebook(self, name, path=''): |
|
191 | def trust_notebook(self, name, path=''): | |
212 | """Explicitly trust a notebook |
|
192 | """Explicitly trust a notebook | |
213 |
|
193 | |||
214 | Parameters |
|
194 | Parameters | |
215 | ---------- |
|
195 | ---------- | |
216 | name : string |
|
196 | name : string | |
217 | The filename of the notebook |
|
197 | The filename of the notebook | |
218 | path : string |
|
198 | path : string | |
219 | The notebook's directory |
|
199 | The notebook's directory | |
220 | """ |
|
200 | """ | |
221 | model = self.get(name, path) |
|
201 | model = self.get_model(name, path) | |
222 | nb = model['content'] |
|
202 | nb = model['content'] | |
223 | self.log.warn("Trusting notebook %s/%s", path, name) |
|
203 | self.log.warn("Trusting notebook %s/%s", path, name) | |
224 | self.notary.mark_cells(nb, True) |
|
204 | self.notary.mark_cells(nb, True) | |
225 | self.save(model, name, path) |
|
205 | self.save(model, name, path) | |
226 |
|
206 | |||
227 | def check_and_sign(self, nb, name, path=''): |
|
207 | def check_and_sign(self, nb, name, path=''): | |
228 | """Check for trusted cells, and sign the notebook. |
|
208 | """Check for trusted cells, and sign the notebook. | |
229 |
|
209 | |||
230 | Called as a part of saving notebooks. |
|
210 | Called as a part of saving notebooks. | |
231 |
|
211 | |||
232 | Parameters |
|
212 | Parameters | |
233 | ---------- |
|
213 | ---------- | |
234 | nb : dict |
|
214 | nb : dict | |
235 | The notebook structure |
|
215 | The notebook structure | |
236 | name : string |
|
216 | name : string | |
237 | The filename of the notebook |
|
217 | The filename of the notebook | |
238 | path : string |
|
218 | path : string | |
239 | The notebook's directory |
|
219 | The notebook's directory | |
240 | """ |
|
220 | """ | |
241 | if self.notary.check_cells(nb): |
|
221 | if self.notary.check_cells(nb): | |
242 | self.notary.sign(nb) |
|
222 | self.notary.sign(nb) | |
243 | else: |
|
223 | else: | |
244 | self.log.warn("Saving untrusted notebook %s/%s", path, name) |
|
224 | self.log.warn("Saving untrusted notebook %s/%s", path, name) | |
245 |
|
225 | |||
246 | def mark_trusted_cells(self, nb, name, path=''): |
|
226 | def mark_trusted_cells(self, nb, name, path=''): | |
247 | """Mark cells as trusted if the notebook signature matches. |
|
227 | """Mark cells as trusted if the notebook signature matches. | |
248 |
|
228 | |||
249 | Called as a part of loading notebooks. |
|
229 | Called as a part of loading notebooks. | |
250 |
|
230 | |||
251 | Parameters |
|
231 | Parameters | |
252 | ---------- |
|
232 | ---------- | |
253 | nb : dict |
|
233 | nb : dict | |
254 | The notebook structure |
|
234 | The notebook structure | |
255 | name : string |
|
235 | name : string | |
256 | The filename of the notebook |
|
236 | The filename of the notebook | |
257 | path : string |
|
237 | path : string | |
258 | The notebook's directory |
|
238 | The notebook's directory | |
259 | """ |
|
239 | """ | |
260 | trusted = self.notary.check_signature(nb) |
|
240 | trusted = self.notary.check_signature(nb) | |
261 | if not trusted: |
|
241 | if not trusted: | |
262 | self.log.warn("Notebook %s/%s is not trusted", path, name) |
|
242 | self.log.warn("Notebook %s/%s is not trusted", path, name) | |
263 | self.notary.mark_cells(nb, trusted) |
|
243 | self.notary.mark_cells(nb, trusted) | |
264 |
|
244 | |||
265 | def should_list(self, name): |
|
245 | def should_list(self, name): | |
266 | """Should this file/directory name be displayed in a listing?""" |
|
246 | """Should this file/directory name be displayed in a listing?""" | |
267 | return not any(fnmatch(name, glob) for glob in self.hide_globs) |
|
247 | return not any(fnmatch(name, glob) for glob in self.hide_globs) |
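The methods diffed above (trust_notebook, check_and_sign, mark_trusted_cells) are easiest to read as one round-trip on a contents manager. A minimal sketch of that round-trip, not part of this changeset, assuming the absolute import path implied by the tests further down and a throwaway root directory:

    import logging
    from IPython.utils.tempdir import TemporaryDirectory
    from IPython.html.services.contents.filemanager import FileContentsManager  # import path assumed

    with TemporaryDirectory() as td:
        cm = FileContentsManager(root_dir=td, log=logging.getLogger())
        model = cm.create_notebook()               # creates Untitled0.ipynb in the root dir
        name, path = model['name'], model['path']
        cm.trust_notebook(name, path)              # loads the model, marks cells trusted, re-saves signed
        nb = cm.get_model(name, path)['content']
        assert cm.notary.check_cells(nb)           # all code cells (if any) now count as trusted

test_trust_notebook in the manager tests below exercises the same sequence; the final assertion mirrors what that test checks.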
@@ -1,346 +1,400 b'' | |||||
1 | # coding: utf-8 |
|
1 | # coding: utf-8 | |
2 | """Test the contents webservice API.""" |
|
2 | """Test the contents webservice API.""" | |
3 |
|
3 | |||
|
4 | import base64 | |||
4 | import io |
|
5 | import io | |
5 | import json |
|
6 | import json | |
6 | import os |
|
7 | import os | |
7 | import shutil |
|
8 | import shutil | |
8 | from unicodedata import normalize |
|
9 | from unicodedata import normalize | |
9 |
|
10 | |||
10 | pjoin = os.path.join |
|
11 | pjoin = os.path.join | |
11 |
|
12 | |||
12 | import requests |
|
13 | import requests | |
13 |
|
14 | |||
14 | from IPython.html.utils import url_path_join, url_escape |
|
15 | from IPython.html.utils import url_path_join, url_escape | |
15 | from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error |
|
16 | from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error | |
16 | from IPython.nbformat import current |
|
17 | from IPython.nbformat import current | |
17 | from IPython.nbformat.current import (new_notebook, write, read, new_worksheet, |
|
18 | from IPython.nbformat.current import (new_notebook, write, read, new_worksheet, | |
18 | new_heading_cell, to_notebook_json) |
|
19 | new_heading_cell, to_notebook_json) | |
19 | from IPython.nbformat import v2 |
|
20 | from IPython.nbformat import v2 | |
20 | from IPython.utils import py3compat |
|
21 | from IPython.utils import py3compat | |
21 | from IPython.utils.data import uniq_stable |
|
22 | from IPython.utils.data import uniq_stable | |
22 |
|
23 | |||
23 |
|
24 | |||
24 | # TODO: Remove this after we create the contents web service and directories are |
|
25 | # TODO: Remove this after we create the contents web service and directories are | |
25 | # no longer listed by the notebook web service. |
|
26 | # no longer listed by the notebook web service. | |
26 | def notebooks_only( |
|
27 | def notebooks_only(dir_model): |
27 | return [nb for nb in |
|
28 | return [nb for nb in dir_model['content'] if nb['type']=='notebook'] | |
28 |
|
29 | |||
29 | def dirs_only( |
|
30 | def dirs_only(dir_model): |
30 | return [x for x in |
|
31 | return [x for x in dir_model['content'] if x['type']=='directory'] | |
31 |
|
32 | |||
32 |
|
33 | |||
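The two helpers above filter a directory model by entry type. A self-contained illustration (the helpers are restated so the snippet runs on its own; the entries are placeholders, not real listings):

    def notebooks_only(dir_model):
        return [nb for nb in dir_model['content'] if nb['type'] == 'notebook']

    def dirs_only(dir_model):
        return [x for x in dir_model['content'] if x['type'] == 'directory']

    # a directory model carries its children under 'content'
    dir_model = {'content': [{'name': 'a.ipynb', 'type': 'notebook'},
                             {'name': 'sub', 'type': 'directory'}]}
    assert [e['name'] for e in notebooks_only(dir_model)] == ['a.ipynb']
    assert [e['name'] for e in dirs_only(dir_model)] == ['sub']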
33 | class API(object): |
|
34 | class API(object): | |
34 | """Wrapper for contents API calls.""" |
|
35 | """Wrapper for contents API calls.""" | |
35 | def __init__(self, base_url): |
|
36 | def __init__(self, base_url): | |
36 | self.base_url = base_url |
|
37 | self.base_url = base_url | |
37 |
|
38 | |||
38 | def _req(self, verb, path, body=None): |
|
39 | def _req(self, verb, path, body=None): | |
39 | response = requests.request(verb, |
|
40 | response = requests.request(verb, | |
40 | url_path_join(self.base_url, 'api/contents', path), |
|
41 | url_path_join(self.base_url, 'api/contents', path), | |
41 | data=body, |
|
42 | data=body, | |
42 | ) |
|
43 | ) | |
43 | response.raise_for_status() |
|
44 | response.raise_for_status() | |
44 | return response |
|
45 | return response | |
45 |
|
46 | |||
46 | def list(self, path='/'): |
|
47 | def list(self, path='/'): | |
47 | return self._req('GET', path) |
|
48 | return self._req('GET', path) | |
48 |
|
49 | |||
49 | def read(self, name, path='/'): |
|
50 | def read(self, name, path='/'): | |
50 | return self._req('GET', url_path_join(path, name)) |
|
51 | return self._req('GET', url_path_join(path, name)) | |
51 |
|
52 | |||
52 | def create_untitled(self, path='/'): |
|
53 | def create_untitled(self, path='/'): | |
53 | return self._req('POST', path) |
|
54 | return self._req('POST', path) | |
54 |
|
55 | |||
55 | def upload_untitled(self, body, path='/'): |
|
56 | def upload_untitled(self, body, path='/'): | |
56 | return self._req('POST', path, body) |
|
57 | return self._req('POST', path, body) | |
57 |
|
58 | |||
58 | def copy_untitled(self, copy_from, path='/'): |
|
59 | def copy_untitled(self, copy_from, path='/'): | |
59 | body = json.dumps({'copy_from':copy_from}) |
|
60 | body = json.dumps({'copy_from':copy_from}) | |
60 | return self._req('POST', path, body) |
|
61 | return self._req('POST', path, body) | |
61 |
|
62 | |||
62 | def create(self, name, path='/'): |
|
63 | def create(self, name, path='/'): | |
63 | return self._req('PUT', url_path_join(path, name)) |
|
64 | return self._req('PUT', url_path_join(path, name)) | |
64 |
|
65 | |||
65 | def upload(self, name, body, path='/'): |
|
66 | def upload(self, name, body, path='/'): | |
66 | return self._req('PUT', url_path_join(path, name), body) |
|
67 | return self._req('PUT', url_path_join(path, name), body) | |
67 |
|
68 | |||
68 | def copy(self, copy_from, copy_to, path='/'): |
|
69 | def copy(self, copy_from, copy_to, path='/'): | |
69 | body = json.dumps({'copy_from':copy_from}) |
|
70 | body = json.dumps({'copy_from':copy_from}) | |
70 | return self._req('PUT', url_path_join(path, copy_to), body) |
|
71 | return self._req('PUT', url_path_join(path, copy_to), body) | |
71 |
|
72 | |||
72 | def save(self, name, body, path='/'): |
|
73 | def save(self, name, body, path='/'): | |
73 | return self._req('PUT', url_path_join(path, name), body) |
|
74 | return self._req('PUT', url_path_join(path, name), body) | |
74 |
|
75 | |||
75 | def delete(self, name, path='/'): |
|
76 | def delete(self, name, path='/'): | |
76 | return self._req('DELETE', url_path_join(path, name)) |
|
77 | return self._req('DELETE', url_path_join(path, name)) | |
77 |
|
78 | |||
78 | def rename(self, name, path, new_name): |
|
79 | def rename(self, name, path, new_name): | |
79 | body = json.dumps({'name': new_name}) |
|
80 | body = json.dumps({'name': new_name}) | |
80 | return self._req('PATCH', url_path_join(path, name), body) |
|
81 | return self._req('PATCH', url_path_join(path, name), body) | |
81 |
|
82 | |||
82 | def get_checkpoints(self, name, path): |
|
83 | def get_checkpoints(self, name, path): | |
83 | return self._req('GET', url_path_join(path, name, 'checkpoints')) |
|
84 | return self._req('GET', url_path_join(path, name, 'checkpoints')) | |
84 |
|
85 | |||
85 | def new_checkpoint(self, name, path): |
|
86 | def new_checkpoint(self, name, path): | |
86 | return self._req('POST', url_path_join(path, name, 'checkpoints')) |
|
87 | return self._req('POST', url_path_join(path, name, 'checkpoints')) | |
87 |
|
88 | |||
88 | def restore_checkpoint(self, name, path, checkpoint_id): |
|
89 | def restore_checkpoint(self, name, path, checkpoint_id): | |
89 | return self._req('POST', url_path_join(path, name, 'checkpoints', checkpoint_id)) |
|
90 | return self._req('POST', url_path_join(path, name, 'checkpoints', checkpoint_id)) | |
90 |
|
91 | |||
91 | def delete_checkpoint(self, name, path, checkpoint_id): |
|
92 | def delete_checkpoint(self, name, path, checkpoint_id): | |
92 | return self._req('DELETE', url_path_join(path, name, 'checkpoints', checkpoint_id)) |
|
93 | return self._req('DELETE', url_path_join(path, name, 'checkpoints', checkpoint_id)) | |
93 |
|
94 | |||
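Each method of the API wrapper above is a thin layer over a single HTTP call against the contents service. A rough equivalent of api.read('a.ipynb', 'foo') and api.rename('a.ipynb', 'foo', 'z.ipynb') without the wrapper, assuming a notebook server is already listening (the address is a placeholder):

    import json
    import requests
    from IPython.html.utils import url_path_join

    base_url = 'http://127.0.0.1:8888/'                        # placeholder server address
    url = url_path_join(base_url, 'api/contents', 'foo', 'a.ipynb')
    model = requests.get(url).json()                           # read: name, path, type, format, content
    requests.patch(url, data=json.dumps({'name': 'z.ipynb'}))  # rename, as api.rename() does

APITest below drives the same endpoints through the wrapper, against the test server started by NotebookTestBase.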
94 | class APITest(NotebookTestBase): |
|
95 | class APITest(NotebookTestBase): | |
95 | """Test the kernels web service API""" |
|
96 | """Test the kernels web service API""" | |
96 | dirs_nbs = [('', 'inroot'), |
|
97 | dirs_nbs = [('', 'inroot'), | |
97 | ('Directory with spaces in', 'inspace'), |
|
98 | ('Directory with spaces in', 'inspace'), | |
98 | (u'unicodΓ©', 'innonascii'), |
|
99 | (u'unicodΓ©', 'innonascii'), | |
99 | ('foo', 'a'), |
|
100 | ('foo', 'a'), | |
100 | ('foo', 'b'), |
|
101 | ('foo', 'b'), | |
101 | ('foo', 'name with spaces'), |
|
102 | ('foo', 'name with spaces'), | |
102 | ('foo', u'unicodΓ©'), |
|
103 | ('foo', u'unicodΓ©'), | |
103 | ('foo/bar', 'baz'), |
|
104 | ('foo/bar', 'baz'), | |
104 | ('ordering', 'A'), |
|
105 | ('ordering', 'A'), | |
105 | ('ordering', 'b'), |
|
106 | ('ordering', 'b'), | |
106 | ('ordering', 'C'), |
|
107 | ('ordering', 'C'), | |
107 | (u'Γ₯ b', u'Γ§ d'), |
|
108 | (u'Γ₯ b', u'Γ§ d'), | |
108 | ] |
|
109 | ] | |
109 | hidden_dirs = ['.hidden', '__pycache__'] |
|
110 | hidden_dirs = ['.hidden', '__pycache__'] | |
110 |
|
111 | |||
111 | dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs]) |
|
112 | dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs]) | |
112 | del dirs[0] # remove '' |
|
113 | del dirs[0] # remove '' | |
113 | top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs} |
|
114 | top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs} | |
114 |
|
115 | |||
|
116 | @staticmethod | |||
|
117 | def _blob_for_name(name): | |||
|
118 | return name.encode('utf-8') + b'\xFF' | |||
|
119 | ||||
|
120 | @staticmethod | |||
|
121 | def _txt_for_name(name): | |||
|
122 | return u'%s text file' % name | |||
|
123 | ||||
115 | def setUp(self): |
|
124 | def setUp(self): | |
116 | nbdir = self.notebook_dir.name |
|
125 | nbdir = self.notebook_dir.name | |
|
126 | self.blob = os.urandom(100) | |||
|
127 | self.b64_blob = base64.encodestring(self.blob).decode('ascii') | |||
|
128 | ||||
|
129 | ||||
117 |
|
130 | |||
118 | for d in (self.dirs + self.hidden_dirs): |
|
131 | for d in (self.dirs + self.hidden_dirs): | |
119 | d.replace('/', os.sep) |
|
132 | d.replace('/', os.sep) | |
120 | if not os.path.isdir(pjoin(nbdir, d)): |
|
133 | if not os.path.isdir(pjoin(nbdir, d)): | |
121 | os.mkdir(pjoin(nbdir, d)) |
|
134 | os.mkdir(pjoin(nbdir, d)) | |
122 |
|
135 | |||
123 | for d, name in self.dirs_nbs: |
|
136 | for d, name in self.dirs_nbs: | |
124 | d = d.replace('/', os.sep) |
|
137 | d = d.replace('/', os.sep) | |
|
138 | # create a notebook | |||
125 | with io.open(pjoin(nbdir, d, '%s.ipynb' % name), 'w', |
|
139 | with io.open(pjoin(nbdir, d, '%s.ipynb' % name), 'w', | |
126 | encoding='utf-8') as f: |
|
140 | encoding='utf-8') as f: | |
127 | nb = new_notebook(name=name) |
|
141 | nb = new_notebook(name=name) | |
128 | write(nb, f, format='ipynb') |
|
142 | write(nb, f, format='ipynb') | |
129 |
|
143 | |||
|
144 | # create a text file | |||
|
145 | with io.open(pjoin(nbdir, d, '%s.txt' % name), 'w', | |||
|
146 | encoding='utf-8') as f: | |||
|
147 | f.write(self._txt_for_name(name)) | |||
|
148 | ||||
|
149 | # create a binary file | |||
|
150 | with io.open(pjoin(nbdir, d, '%s.blob' % name), 'wb') as f: | |||
|
151 | f.write(self._blob_for_name(name)) | |||
|
152 | ||||
130 | self.api = API(self.base_url()) |
|
153 | self.api = API(self.base_url()) | |
131 |
|
154 | |||
132 | def tearDown(self): |
|
155 | def tearDown(self): | |
133 | nbdir = self.notebook_dir.name |
|
156 | nbdir = self.notebook_dir.name | |
134 |
|
157 | |||
135 | for dname in (list(self.top_level_dirs) + self.hidden_dirs): |
|
158 | for dname in (list(self.top_level_dirs) + self.hidden_dirs): | |
136 | shutil.rmtree(pjoin(nbdir, dname), ignore_errors=True) |
|
159 | shutil.rmtree(pjoin(nbdir, dname), ignore_errors=True) | |
137 |
|
160 | |||
138 | if os.path.isfile(pjoin(nbdir, 'inroot.ipynb')): |
|
161 | if os.path.isfile(pjoin(nbdir, 'inroot.ipynb')): | |
139 | os.unlink(pjoin(nbdir, 'inroot.ipynb')) |
|
162 | os.unlink(pjoin(nbdir, 'inroot.ipynb')) | |
140 |
|
163 | |||
141 | def test_list_notebooks(self): |
|
164 | def test_list_notebooks(self): | |
142 | nbs = notebooks_only(self.api.list().json()) |
|
165 | nbs = notebooks_only(self.api.list().json()) | |
143 | self.assertEqual(len(nbs), 1) |
|
166 | self.assertEqual(len(nbs), 1) | |
144 | self.assertEqual(nbs[0]['name'], 'inroot.ipynb') |
|
167 | self.assertEqual(nbs[0]['name'], 'inroot.ipynb') | |
145 |
|
168 | |||
146 | nbs = notebooks_only(self.api.list('/Directory with spaces in/').json()) |
|
169 | nbs = notebooks_only(self.api.list('/Directory with spaces in/').json()) | |
147 | self.assertEqual(len(nbs), 1) |
|
170 | self.assertEqual(len(nbs), 1) | |
148 | self.assertEqual(nbs[0]['name'], 'inspace.ipynb') |
|
171 | self.assertEqual(nbs[0]['name'], 'inspace.ipynb') | |
149 |
|
172 | |||
150 | nbs = notebooks_only(self.api.list(u'/unicodΓ©/').json()) |
|
173 | nbs = notebooks_only(self.api.list(u'/unicodΓ©/').json()) | |
151 | self.assertEqual(len(nbs), 1) |
|
174 | self.assertEqual(len(nbs), 1) | |
152 | self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') |
|
175 | self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') | |
153 | self.assertEqual(nbs[0]['path'], u'unicodΓ©') |
|
176 | self.assertEqual(nbs[0]['path'], u'unicodΓ©') | |
154 |
|
177 | |||
155 | nbs = notebooks_only(self.api.list('/foo/bar/').json()) |
|
178 | nbs = notebooks_only(self.api.list('/foo/bar/').json()) | |
156 | self.assertEqual(len(nbs), 1) |
|
179 | self.assertEqual(len(nbs), 1) | |
157 | self.assertEqual(nbs[0]['name'], 'baz.ipynb') |
|
180 | self.assertEqual(nbs[0]['name'], 'baz.ipynb') | |
158 | self.assertEqual(nbs[0]['path'], 'foo/bar') |
|
181 | self.assertEqual(nbs[0]['path'], 'foo/bar') | |
159 |
|
182 | |||
160 | nbs = notebooks_only(self.api.list('foo').json()) |
|
183 | nbs = notebooks_only(self.api.list('foo').json()) | |
161 | self.assertEqual(len(nbs), 4) |
|
184 | self.assertEqual(len(nbs), 4) | |
162 | nbnames = { normalize('NFC', n['name']) for n in nbs } |
|
185 | nbnames = { normalize('NFC', n['name']) for n in nbs } | |
163 | expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodΓ©.ipynb'] |
|
186 | expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodΓ©.ipynb'] | |
164 | expected = { normalize('NFC', name) for name in expected } |
|
187 | expected = { normalize('NFC', name) for name in expected } | |
165 | self.assertEqual(nbnames, expected) |
|
188 | self.assertEqual(nbnames, expected) | |
166 |
|
189 | |||
167 | nbs = notebooks_only(self.api.list('ordering').json()) |
|
190 | nbs = notebooks_only(self.api.list('ordering').json()) | |
168 | nbnames = [n['name'] for n in nbs] |
|
191 | nbnames = [n['name'] for n in nbs] | |
169 | expected = ['A.ipynb', 'b.ipynb', 'C.ipynb'] |
|
192 | expected = ['A.ipynb', 'b.ipynb', 'C.ipynb'] | |
170 | self.assertEqual(nbnames, expected) |
|
193 | self.assertEqual(nbnames, expected) | |
171 |
|
194 | |||
172 | def test_list_dirs(self): |
|
195 | def test_list_dirs(self): | |
173 | dirs = dirs_only(self.api.list().json()) |
|
196 | dirs = dirs_only(self.api.list().json()) | |
174 | dir_names = {normalize('NFC', d['name']) for d in dirs} |
|
197 | dir_names = {normalize('NFC', d['name']) for d in dirs} | |
175 | self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs |
|
198 | self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs | |
176 |
|
199 | |||
177 | def test_list_nonexistant_dir(self): |
|
200 | def test_list_nonexistant_dir(self): | |
178 | with assert_http_error(404): |
|
201 | with assert_http_error(404): | |
179 | self.api.list('nonexistant') |
|
202 | self.api.list('nonexistant') | |
180 |
|
203 | |||
181 | def test_get_contents(self): |
|
204 | def test_get_nb_contents(self): | |
182 | for d, name in self.dirs_nbs: |
|
205 | for d, name in self.dirs_nbs: | |
183 | nb = self.api.read('%s.ipynb' % name, d+'/').json() |
|
206 | nb = self.api.read('%s.ipynb' % name, d+'/').json() | |
184 | self.assertEqual(nb['name'], u'%s.ipynb' % name) |
|
207 | self.assertEqual(nb['name'], u'%s.ipynb' % name) | |
|
208 | self.assertEqual(nb['type'], 'notebook') | |||
|
209 | self.assertIn('content', nb) | |||
|
210 | self.assertEqual(nb['format'], 'json') | |||
185 | self.assertIn('content', nb) |
|
211 | self.assertIn('content', nb) | |
186 | self.assertIn('metadata', nb['content']) |
|
212 | self.assertIn('metadata', nb['content']) | |
187 | self.assertIsInstance(nb['content']['metadata'], dict) |
|
213 | self.assertIsInstance(nb['content']['metadata'], dict) | |
188 |
|
214 | |||
|
215 | def test_get_contents_no_such_file(self): | |||
189 | # Name that doesn't exist - should be a 404 |
|
216 | # Name that doesn't exist - should be a 404 | |
190 | with assert_http_error(404): |
|
217 | with assert_http_error(404): | |
191 | self.api.read('q.ipynb', 'foo') |
|
218 | self.api.read('q.ipynb', 'foo') | |
192 |
|
219 | |||
|
220 | def test_get_text_file_contents(self): | |||
|
221 | for d, name in self.dirs_nbs: | |||
|
222 | model = self.api.read(u'%s.txt' % name, d+'/').json() | |||
|
223 | self.assertEqual(model['name'], u'%s.txt' % name) | |||
|
224 | self.assertIn('content', model) | |||
|
225 | self.assertEqual(model['format'], 'text') | |||
|
226 | self.assertEqual(model['type'], 'file') | |||
|
227 | self.assertEqual(model['content'], self._txt_for_name(name)) | |||
|
228 | ||||
|
229 | # Name that doesn't exist - should be a 404 | |||
|
230 | with assert_http_error(404): | |||
|
231 | self.api.read('q.txt', 'foo') | |||
|
232 | ||||
|
233 | def test_get_binary_file_contents(self): | |||
|
234 | for d, name in self.dirs_nbs: | |||
|
235 | model = self.api.read(u'%s.blob' % name, d+'/').json() | |||
|
236 | self.assertEqual(model['name'], u'%s.blob' % name) | |||
|
237 | self.assertIn('content', model) | |||
|
238 | self.assertEqual(model['format'], 'base64') | |||
|
239 | self.assertEqual(model['type'], 'file') | |||
|
240 | b64_data = base64.encodestring(self._blob_for_name(name)).decode('ascii') | |||
|
241 | self.assertEqual(model['content'], b64_data) | |||
|
242 | ||||
|
243 | # Name that doesn't exist - should be a 404 | |||
|
244 | with assert_http_error(404): | |||
|
245 | self.api.read('q.txt', 'foo') | |||
|
246 | ||||
193 | def _check_nb_created(self, resp, name, path): |
|
247 | def _check_nb_created(self, resp, name, path): | |
194 | self.assertEqual(resp.status_code, 201) |
|
248 | self.assertEqual(resp.status_code, 201) | |
195 | location_header = py3compat.str_to_unicode(resp.headers['Location']) |
|
249 | location_header = py3compat.str_to_unicode(resp.headers['Location']) | |
196 | self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path, name))) |
|
250 | self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path, name))) | |
197 | self.assertEqual(resp.json()['name'], name) |
|
251 | self.assertEqual(resp.json()['name'], name) | |
198 | assert os.path.isfile(pjoin( |
|
252 | assert os.path.isfile(pjoin( | |
199 | self.notebook_dir.name, |
|
253 | self.notebook_dir.name, | |
200 | path.replace('/', os.sep), |
|
254 | path.replace('/', os.sep), | |
201 | name, |
|
255 | name, | |
202 | )) |
|
256 | )) | |
203 |
|
257 | |||
204 | def test_create_untitled(self): |
|
258 | def test_create_untitled(self): | |
205 | resp = self.api.create_untitled(path=u'Γ₯ b') |
|
259 | resp = self.api.create_untitled(path=u'Γ₯ b') | |
206 | self._check_nb_created(resp, 'Untitled0.ipynb', u'Γ₯ b') |
|
260 | self._check_nb_created(resp, 'Untitled0.ipynb', u'Γ₯ b') | |
207 |
|
261 | |||
208 | # Second time |
|
262 | # Second time | |
209 | resp = self.api.create_untitled(path=u'Γ₯ b') |
|
263 | resp = self.api.create_untitled(path=u'Γ₯ b') | |
210 | self._check_nb_created(resp, 'Untitled1.ipynb', u'Γ₯ b') |
|
264 | self._check_nb_created(resp, 'Untitled1.ipynb', u'Γ₯ b') | |
211 |
|
265 | |||
212 | # And two directories down |
|
266 | # And two directories down | |
213 | resp = self.api.create_untitled(path='foo/bar') |
|
267 | resp = self.api.create_untitled(path='foo/bar') | |
214 | self._check_nb_created(resp, 'Untitled0.ipynb', 'foo/bar') |
|
268 | self._check_nb_created(resp, 'Untitled0.ipynb', 'foo/bar') | |
215 |
|
269 | |||
216 | def test_upload_untitled(self): |
|
270 | def test_upload_untitled(self): | |
217 | nb = new_notebook(name='Upload test') |
|
271 | nb = new_notebook(name='Upload test') | |
218 | nbmodel = {'content': nb} |
|
272 | nbmodel = {'content': nb} | |
219 | resp = self.api.upload_untitled(path=u'Γ₯ b', |
|
273 | resp = self.api.upload_untitled(path=u'Γ₯ b', | |
220 | body=json.dumps(nbmodel)) |
|
274 | body=json.dumps(nbmodel)) | |
221 | self._check_nb_created(resp, 'Untitled0.ipynb', u'Γ₯ b') |
|
275 | self._check_nb_created(resp, 'Untitled0.ipynb', u'Γ₯ b') | |
222 |
|
276 | |||
223 | def test_upload(self): |
|
277 | def test_upload(self): | |
224 | nb = new_notebook(name=u'ignored') |
|
278 | nb = new_notebook(name=u'ignored') | |
225 | nbmodel = {'content': nb} |
|
279 | nbmodel = {'content': nb} | |
226 | resp = self.api.upload(u'Upload tΓ©st.ipynb', path=u'Γ₯ b', |
|
280 | resp = self.api.upload(u'Upload tΓ©st.ipynb', path=u'Γ₯ b', | |
227 | body=json.dumps(nbmodel)) |
|
281 | body=json.dumps(nbmodel)) | |
228 | self._check_nb_created(resp, u'Upload tΓ©st.ipynb', u'Γ₯ b') |
|
282 | self._check_nb_created(resp, u'Upload tΓ©st.ipynb', u'Γ₯ b') | |
229 |
|
283 | |||
230 | def test_upload_v2(self): |
|
284 | def test_upload_v2(self): | |
231 | nb = v2.new_notebook() |
|
285 | nb = v2.new_notebook() | |
232 | ws = v2.new_worksheet() |
|
286 | ws = v2.new_worksheet() | |
233 | nb.worksheets.append(ws) |
|
287 | nb.worksheets.append(ws) | |
234 | ws.cells.append(v2.new_code_cell(input='print("hi")')) |
|
288 | ws.cells.append(v2.new_code_cell(input='print("hi")')) | |
235 | nbmodel = {'content': nb} |
|
289 | nbmodel = {'content': nb} | |
236 | resp = self.api.upload(u'Upload tΓ©st.ipynb', path=u'Γ₯ b', |
|
290 | resp = self.api.upload(u'Upload tΓ©st.ipynb', path=u'Γ₯ b', | |
237 | body=json.dumps(nbmodel)) |
|
291 | body=json.dumps(nbmodel)) | |
238 | self._check_nb_created(resp, u'Upload tΓ©st.ipynb', u'Γ₯ b') |
|
292 | self._check_nb_created(resp, u'Upload tΓ©st.ipynb', u'Γ₯ b') | |
239 | resp = self.api.read(u'Upload tΓ©st.ipynb', u'Γ₯ b') |
|
293 | resp = self.api.read(u'Upload tΓ©st.ipynb', u'Γ₯ b') | |
240 | data = resp.json() |
|
294 | data = resp.json() | |
241 | self.assertEqual(data['content']['nbformat'], current.nbformat) |
|
295 | self.assertEqual(data['content']['nbformat'], current.nbformat) | |
242 | self.assertEqual(data['content']['orig_nbformat'], 2) |
|
296 | self.assertEqual(data['content']['orig_nbformat'], 2) | |
243 |
|
297 | |||
244 | def test_copy_untitled(self): |
|
298 | def test_copy_untitled(self): | |
245 | resp = self.api.copy_untitled(u'Γ§ d.ipynb', path=u'Γ₯ b') |
|
299 | resp = self.api.copy_untitled(u'Γ§ d.ipynb', path=u'Γ₯ b') | |
246 | self._check_nb_created(resp, u'Γ§ d-Copy0.ipynb', u'Γ₯ b') |
|
300 | self._check_nb_created(resp, u'Γ§ d-Copy0.ipynb', u'Γ₯ b') | |
247 |
|
301 | |||
248 | def test_copy(self): |
|
302 | def test_copy(self): | |
249 | resp = self.api.copy(u'Γ§ d.ipynb', u'cΓΈpy.ipynb', path=u'Γ₯ b') |
|
303 | resp = self.api.copy(u'Γ§ d.ipynb', u'cΓΈpy.ipynb', path=u'Γ₯ b') | |
250 | self._check_nb_created(resp, u'cΓΈpy.ipynb', u'Γ₯ b') |
|
304 | self._check_nb_created(resp, u'cΓΈpy.ipynb', u'Γ₯ b') | |
251 |
|
305 | |||
252 | def test_delete(self): |
|
306 | def test_delete(self): | |
253 | for d, name in self.dirs_nbs: |
|
307 | for d, name in self.dirs_nbs: | |
254 | resp = self.api.delete('%s.ipynb' % name, d) |
|
308 | resp = self.api.delete('%s.ipynb' % name, d) | |
255 | self.assertEqual(resp.status_code, 204) |
|
309 | self.assertEqual(resp.status_code, 204) | |
256 |
|
310 | |||
257 | for d in self.dirs + ['/']: |
|
311 | for d in self.dirs + ['/']: | |
258 | nbs = notebooks_only(self.api.list(d).json()) |
|
312 | nbs = notebooks_only(self.api.list(d).json()) | |
259 | self.assertEqual(len(nbs), 0) |
|
313 | self.assertEqual(len(nbs), 0) | |
260 |
|
314 | |||
261 | def test_rename(self): |
|
315 | def test_rename(self): | |
262 | resp = self.api.rename('a.ipynb', 'foo', 'z.ipynb') |
|
316 | resp = self.api.rename('a.ipynb', 'foo', 'z.ipynb') | |
263 | self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb') |
|
317 | self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb') | |
264 | self.assertEqual(resp.json()['name'], 'z.ipynb') |
|
318 | self.assertEqual(resp.json()['name'], 'z.ipynb') | |
265 | assert os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'z.ipynb')) |
|
319 | assert os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'z.ipynb')) | |
266 |
|
320 | |||
267 | nbs = notebooks_only(self.api.list('foo').json()) |
|
321 | nbs = notebooks_only(self.api.list('foo').json()) | |
268 | nbnames = set(n['name'] for n in nbs) |
|
322 | nbnames = set(n['name'] for n in nbs) | |
269 | self.assertIn('z.ipynb', nbnames) |
|
323 | self.assertIn('z.ipynb', nbnames) | |
270 | self.assertNotIn('a.ipynb', nbnames) |
|
324 | self.assertNotIn('a.ipynb', nbnames) | |
271 |
|
325 | |||
272 | def test_rename_existing(self): |
|
326 | def test_rename_existing(self): | |
273 | with assert_http_error(409): |
|
327 | with assert_http_error(409): | |
274 | self.api.rename('a.ipynb', 'foo', 'b.ipynb') |
|
328 | self.api.rename('a.ipynb', 'foo', 'b.ipynb') | |
275 |
|
329 | |||
276 | def test_save(self): |
|
330 | def test_save(self): | |
277 | resp = self.api.read('a.ipynb', 'foo') |
|
331 | resp = self.api.read('a.ipynb', 'foo') | |
278 | nbcontent = json.loads(resp.text)['content'] |
|
332 | nbcontent = json.loads(resp.text)['content'] | |
279 | nb = to_notebook_json(nbcontent) |
|
333 | nb = to_notebook_json(nbcontent) | |
280 | ws = new_worksheet() |
|
334 | ws = new_worksheet() | |
281 | nb.worksheets = [ws] |
|
335 | nb.worksheets = [ws] | |
282 | ws.cells.append(new_heading_cell(u'Created by test Β³')) |
|
336 | ws.cells.append(new_heading_cell(u'Created by test Β³')) | |
283 |
|
337 | |||
284 | nbmodel= {'name': 'a.ipynb', 'path':'foo', 'content': nb} |
|
338 | nbmodel= {'name': 'a.ipynb', 'path':'foo', 'content': nb} | |
285 | resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel)) |
|
339 | resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel)) | |
286 |
|
340 | |||
287 | nbfile = pjoin(self.notebook_dir.name, 'foo', 'a.ipynb') |
|
341 | nbfile = pjoin(self.notebook_dir.name, 'foo', 'a.ipynb') | |
288 | with io.open(nbfile, 'r', encoding='utf-8') as f: |
|
342 | with io.open(nbfile, 'r', encoding='utf-8') as f: | |
289 | newnb = read(f, format='ipynb') |
|
343 | newnb = read(f, format='ipynb') | |
290 | self.assertEqual(newnb.worksheets[0].cells[0].source, |
|
344 | self.assertEqual(newnb.worksheets[0].cells[0].source, | |
291 | u'Created by test Β³') |
|
345 | u'Created by test Β³') | |
292 | nbcontent = self.api.read('a.ipynb', 'foo').json()['content'] |
|
346 | nbcontent = self.api.read('a.ipynb', 'foo').json()['content'] | |
293 | newnb = to_notebook_json(nbcontent) |
|
347 | newnb = to_notebook_json(nbcontent) | |
294 | self.assertEqual(newnb.worksheets[0].cells[0].source, |
|
348 | self.assertEqual(newnb.worksheets[0].cells[0].source, | |
295 | u'Created by test Β³') |
|
349 | u'Created by test Β³') | |
296 |
|
350 | |||
297 | # Save and rename |
|
351 | # Save and rename | |
298 | nbmodel= {'name': 'a2.ipynb', 'path':'foo/bar', 'content': nb} |
|
352 | nbmodel= {'name': 'a2.ipynb', 'path':'foo/bar', 'content': nb} | |
299 | resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel)) |
|
353 | resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel)) | |
300 | saved = resp.json() |
|
354 | saved = resp.json() | |
301 | self.assertEqual(saved['name'], 'a2.ipynb') |
|
355 | self.assertEqual(saved['name'], 'a2.ipynb') | |
302 | self.assertEqual(saved['path'], 'foo/bar') |
|
356 | self.assertEqual(saved['path'], 'foo/bar') | |
303 | assert os.path.isfile(pjoin(self.notebook_dir.name,'foo','bar','a2.ipynb')) |
|
357 | assert os.path.isfile(pjoin(self.notebook_dir.name,'foo','bar','a2.ipynb')) | |
304 | assert not os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'a.ipynb')) |
|
358 | assert not os.path.isfile(pjoin(self.notebook_dir.name, 'foo', 'a.ipynb')) | |
305 | with assert_http_error(404): |
|
359 | with assert_http_error(404): | |
306 | self.api.read('a.ipynb', 'foo') |
|
360 | self.api.read('a.ipynb', 'foo') | |
307 |
|
361 | |||
308 | def test_checkpoints(self): |
|
362 | def test_checkpoints(self): | |
309 | resp = self.api.read('a.ipynb', 'foo') |
|
363 | resp = self.api.read('a.ipynb', 'foo') | |
310 | r = self.api.new_checkpoint('a.ipynb', 'foo') |
|
364 | r = self.api.new_checkpoint('a.ipynb', 'foo') | |
311 | self.assertEqual(r.status_code, 201) |
|
365 | self.assertEqual(r.status_code, 201) | |
312 | cp1 = r.json() |
|
366 | cp1 = r.json() | |
313 | self.assertEqual(set(cp1), {'id', 'last_modified'}) |
|
367 | self.assertEqual(set(cp1), {'id', 'last_modified'}) | |
314 | self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id']) |
|
368 | self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id']) | |
315 |
|
369 | |||
316 | # Modify it |
|
370 | # Modify it | |
317 | nbcontent = json.loads(resp.text)['content'] |
|
371 | nbcontent = json.loads(resp.text)['content'] | |
318 | nb = to_notebook_json(nbcontent) |
|
372 | nb = to_notebook_json(nbcontent) | |
319 | ws = new_worksheet() |
|
373 | ws = new_worksheet() | |
320 | nb.worksheets = [ws] |
|
374 | nb.worksheets = [ws] | |
321 | hcell = new_heading_cell('Created by test') |
|
375 | hcell = new_heading_cell('Created by test') | |
322 | ws.cells.append(hcell) |
|
376 | ws.cells.append(hcell) | |
323 | # Save |
|
377 | # Save | |
324 | nbmodel= {'name': 'a.ipynb', 'path':'foo', 'content': nb} |
|
378 | nbmodel= {'name': 'a.ipynb', 'path':'foo', 'content': nb} | |
325 | resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel)) |
|
379 | resp = self.api.save('a.ipynb', path='foo', body=json.dumps(nbmodel)) | |
326 |
|
380 | |||
327 | # List checkpoints |
|
381 | # List checkpoints | |
328 | cps = self.api.get_checkpoints('a.ipynb', 'foo').json() |
|
382 | cps = self.api.get_checkpoints('a.ipynb', 'foo').json() | |
329 | self.assertEqual(cps, [cp1]) |
|
383 | self.assertEqual(cps, [cp1]) | |
330 |
|
384 | |||
331 | nbcontent = self.api.read('a.ipynb', 'foo').json()['content'] |
|
385 | nbcontent = self.api.read('a.ipynb', 'foo').json()['content'] | |
332 | nb = to_notebook_json(nbcontent) |
|
386 | nb = to_notebook_json(nbcontent) | |
333 | self.assertEqual(nb.worksheets[0].cells[0].source, 'Created by test') |
|
387 | self.assertEqual(nb.worksheets[0].cells[0].source, 'Created by test') | |
334 |
|
388 | |||
335 | # Restore cp1 |
|
389 | # Restore cp1 | |
336 | r = self.api.restore_checkpoint('a.ipynb', 'foo', cp1['id']) |
|
390 | r = self.api.restore_checkpoint('a.ipynb', 'foo', cp1['id']) | |
337 | self.assertEqual(r.status_code, 204) |
|
391 | self.assertEqual(r.status_code, 204) | |
338 | nbcontent = self.api.read('a.ipynb', 'foo').json()['content'] |
|
392 | nbcontent = self.api.read('a.ipynb', 'foo').json()['content'] | |
339 | nb = to_notebook_json(nbcontent) |
|
393 | nb = to_notebook_json(nbcontent) | |
340 | self.assertEqual(nb.worksheets, []) |
|
394 | self.assertEqual(nb.worksheets, []) | |
341 |
|
395 | |||
342 | # Delete cp1 |
|
396 | # Delete cp1 | |
343 | r = self.api.delete_checkpoint('a.ipynb', 'foo', cp1['id']) |
|
397 | r = self.api.delete_checkpoint('a.ipynb', 'foo', cp1['id']) | |
344 | self.assertEqual(r.status_code, 204) |
|
398 | self.assertEqual(r.status_code, 204) | |
345 | cps = self.api.get_checkpoints('a.ipynb', 'foo').json() |
|
399 | cps = self.api.get_checkpoints('a.ipynb', 'foo').json() | |
346 | self.assertEqual(cps, []) |
|
400 | self.assertEqual(cps, []) |
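Taken together, the GET tests above pin down three model shapes returned by /api/contents. A compact restatement as Python literals; only the keys and the type/format pairings come from the tests, the values are stand-ins:

    notebook_model = {'name': 'a.ipynb', 'path': 'foo', 'type': 'notebook',
                      'format': 'json',
                      'content': {'metadata': {}, 'worksheets': []}}   # nbformat dict with metadata
    text_model = {'name': 'a.txt', 'path': 'foo', 'type': 'file',
                  'format': 'text', 'content': u'a text file'}
    binary_model = {'name': 'a.blob', 'path': 'foo', 'type': 'file',
                    'format': 'base64', 'content': 'aGVsbG8=\n'}       # base64-encoded bytes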
@@ -1,301 +1,301 b'' | |||||
1 | # coding: utf-8 |
|
1 | # coding: utf-8 | |
2 | """Tests for the notebook manager.""" |
|
2 | """Tests for the notebook manager.""" | |
3 | from __future__ import print_function |
|
3 | from __future__ import print_function | |
4 |
|
4 | |||
5 | import logging |
|
5 | import logging | |
6 | import os |
|
6 | import os | |
7 |
|
7 | |||
8 | from tornado.web import HTTPError |
|
8 | from tornado.web import HTTPError | |
9 | from unittest import TestCase |
|
9 | from unittest import TestCase | |
10 | from tempfile import NamedTemporaryFile |
|
10 | from tempfile import NamedTemporaryFile | |
11 |
|
11 | |||
12 | from IPython.nbformat import current |
|
12 | from IPython.nbformat import current | |
13 |
|
13 | |||
14 | from IPython.utils.tempdir import TemporaryDirectory |
|
14 | from IPython.utils.tempdir import TemporaryDirectory | |
15 | from IPython.utils.traitlets import TraitError |
|
15 | from IPython.utils.traitlets import TraitError | |
16 | from IPython.html.utils import url_path_join |
|
16 | from IPython.html.utils import url_path_join | |
17 |
|
17 | |||
18 | from ..filemanager import FileContentsManager |
|
18 | from ..filemanager import FileContentsManager | |
19 | from ..manager import ContentsManager |
|
19 | from ..manager import ContentsManager | |
20 |
|
20 | |||
21 |
|
21 | |||
22 | class TestFileContentsManager(TestCase): |
|
22 | class TestFileContentsManager(TestCase): | |
23 |
|
23 | |||
24 | def test_root_dir(self): |
|
24 | def test_root_dir(self): | |
25 | with TemporaryDirectory() as td: |
|
25 | with TemporaryDirectory() as td: | |
26 | fm = FileContentsManager(root_dir=td) |
|
26 | fm = FileContentsManager(root_dir=td) | |
27 | self.assertEqual(fm.root_dir, td) |
|
27 | self.assertEqual(fm.root_dir, td) | |
28 |
|
28 | |||
29 | def test_missing_root_dir(self): |
|
29 | def test_missing_root_dir(self): | |
30 | with TemporaryDirectory() as td: |
|
30 | with TemporaryDirectory() as td: | |
31 | root = os.path.join(td, 'notebook', 'dir', 'is', 'missing') |
|
31 | root = os.path.join(td, 'notebook', 'dir', 'is', 'missing') | |
32 | self.assertRaises(TraitError, FileContentsManager, root_dir=root) |
|
32 | self.assertRaises(TraitError, FileContentsManager, root_dir=root) | |
33 |
|
33 | |||
34 | def test_invalid_root_dir(self): |
|
34 | def test_invalid_root_dir(self): | |
35 | with NamedTemporaryFile() as tf: |
|
35 | with NamedTemporaryFile() as tf: | |
36 | self.assertRaises(TraitError, FileContentsManager, root_dir=tf.name) |
|
36 | self.assertRaises(TraitError, FileContentsManager, root_dir=tf.name) | |
37 |
|
37 | |||
38 | def test_get_os_path(self): |
|
38 | def test_get_os_path(self): | |
39 | # full filesystem path should be returned with correct operating system |
|
39 | # full filesystem path should be returned with correct operating system | |
40 | # separators. |
|
40 | # separators. | |
41 | with TemporaryDirectory() as td: |
|
41 | with TemporaryDirectory() as td: | |
42 | root = td |
|
42 | root = td | |
43 | fm = FileContentsManager(root_dir=root) |
|
43 | fm = FileContentsManager(root_dir=root) | |
44 | path = fm._get_os_path('test.ipynb', '/path/to/notebook/') |
|
44 | path = fm._get_os_path('test.ipynb', '/path/to/notebook/') | |
45 | rel_path_list = '/path/to/notebook/test.ipynb'.split('/') |
|
45 | rel_path_list = '/path/to/notebook/test.ipynb'.split('/') | |
46 | fs_path = os.path.join(fm.root_dir, *rel_path_list) |
|
46 | fs_path = os.path.join(fm.root_dir, *rel_path_list) | |
47 | self.assertEqual(path, fs_path) |
|
47 | self.assertEqual(path, fs_path) | |
48 |
|
48 | |||
49 | fm = FileContentsManager(root_dir=root) |
|
49 | fm = FileContentsManager(root_dir=root) | |
50 | path = fm._get_os_path('test.ipynb') |
|
50 | path = fm._get_os_path('test.ipynb') | |
51 | fs_path = os.path.join(fm.root_dir, 'test.ipynb') |
|
51 | fs_path = os.path.join(fm.root_dir, 'test.ipynb') | |
52 | self.assertEqual(path, fs_path) |
|
52 | self.assertEqual(path, fs_path) | |
53 |
|
53 | |||
54 | fm = FileContentsManager(root_dir=root) |
|
54 | fm = FileContentsManager(root_dir=root) | |
55 | path = fm._get_os_path('test.ipynb', '////') |
|
55 | path = fm._get_os_path('test.ipynb', '////') | |
56 | fs_path = os.path.join(fm.root_dir, 'test.ipynb') |
|
56 | fs_path = os.path.join(fm.root_dir, 'test.ipynb') | |
57 | self.assertEqual(path, fs_path) |
|
57 | self.assertEqual(path, fs_path) | |
58 |
|
58 | |||
59 | def test_checkpoint_subdir(self): |
|
59 | def test_checkpoint_subdir(self): | |
60 | subd = u'sub βir' |
|
60 | subd = u'sub βir' | |
61 | cp_name = 'test-cp.ipynb' |
|
61 | cp_name = 'test-cp.ipynb' | |
62 | with TemporaryDirectory() as td: |
|
62 | with TemporaryDirectory() as td: | |
63 | root = td |
|
63 | root = td | |
64 | os.mkdir(os.path.join(td, subd)) |
|
64 | os.mkdir(os.path.join(td, subd)) | |
65 | fm = FileContentsManager(root_dir=root) |
|
65 | fm = FileContentsManager(root_dir=root) | |
66 | cp_dir = fm.get_checkpoint_path('cp', 'test.ipynb', '/') |
|
66 | cp_dir = fm.get_checkpoint_path('cp', 'test.ipynb', '/') | |
67 | cp_subdir = fm.get_checkpoint_path('cp', 'test.ipynb', '/%s/' % subd) |
|
67 | cp_subdir = fm.get_checkpoint_path('cp', 'test.ipynb', '/%s/' % subd) | |
68 | self.assertNotEqual(cp_dir, cp_subdir) |
|
68 | self.assertNotEqual(cp_dir, cp_subdir) | |
69 | self.assertEqual(cp_dir, os.path.join(root, fm.checkpoint_dir, cp_name)) |
|
69 | self.assertEqual(cp_dir, os.path.join(root, fm.checkpoint_dir, cp_name)) | |
70 | self.assertEqual(cp_subdir, os.path.join(root, subd, fm.checkpoint_dir, cp_name)) |
|
70 | self.assertEqual(cp_subdir, os.path.join(root, subd, fm.checkpoint_dir, cp_name)) | |
71 |
|
71 | |||
72 |
|
72 | |||
73 | class Test |
|
73 | class TestContentsManager(TestCase): | |
74 |
|
74 | |||
75 | def setUp(self): |
|
75 | def setUp(self): | |
76 | self._temp_dir = TemporaryDirectory() |
|
76 | self._temp_dir = TemporaryDirectory() | |
77 | self.td = self._temp_dir.name |
|
77 | self.td = self._temp_dir.name | |
78 | self.contents_manager = FileContentsManager( |
|
78 | self.contents_manager = FileContentsManager( | |
79 | root_dir=self.td, |
|
79 | root_dir=self.td, | |
80 | log=logging.getLogger() |
|
80 | log=logging.getLogger() | |
81 | ) |
|
81 | ) | |
82 |
|
82 | |||
83 | def tearDown(self): |
|
83 | def tearDown(self): | |
84 | self._temp_dir.cleanup() |
|
84 | self._temp_dir.cleanup() | |
85 |
|
85 | |||
86 | def make_dir(self, abs_path, rel_path): |
|
86 | def make_dir(self, abs_path, rel_path): | |
87 | """make subdirectory, rel_path is the relative path |
|
87 | """make subdirectory, rel_path is the relative path | |
88 | to that directory from the location where the server started""" |
|
88 | to that directory from the location where the server started""" | |
89 | os_path = os.path.join(abs_path, rel_path) |
|
89 | os_path = os.path.join(abs_path, rel_path) | |
90 | try: |
|
90 | try: | |
91 | os.makedirs(os_path) |
|
91 | os.makedirs(os_path) | |
92 | except OSError: |
|
92 | except OSError: | |
93 | print("Directory already exists: %r" % os_path) |
|
93 | print("Directory already exists: %r" % os_path) | |
94 |
|
94 | |||
95 | def add_code_cell(self, nb): |
|
95 | def add_code_cell(self, nb): | |
96 | output = current.new_output("display_data", output_javascript="alert('hi');") |
|
96 | output = current.new_output("display_data", output_javascript="alert('hi');") | |
97 | cell = current.new_code_cell("print('hi')", outputs=[output]) |
|
97 | cell = current.new_code_cell("print('hi')", outputs=[output]) | |
98 | if not nb.worksheets: |
|
98 | if not nb.worksheets: | |
99 | nb.worksheets.append(current.new_worksheet()) |
|
99 | nb.worksheets.append(current.new_worksheet()) | |
100 | nb.worksheets[0].cells.append(cell) |
|
100 | nb.worksheets[0].cells.append(cell) | |
101 |
|
101 | |||
102 | def new_notebook(self): |
|
102 | def new_notebook(self): | |
103 | cm = self.contents_manager |
|
103 | cm = self.contents_manager | |
104 | model = cm.create_notebook() |
|
104 | model = cm.create_notebook() | |
105 | name = model['name'] |
|
105 | name = model['name'] | |
106 | path = model['path'] |
|
106 | path = model['path'] | |
107 |
|
107 | |||
108 | full_model = cm.get(name, path) |
|
108 | full_model = cm.get_model(name, path) | |
109 | nb = full_model['content'] |
|
109 | nb = full_model['content'] | |
110 | self.add_code_cell(nb) |
|
110 | self.add_code_cell(nb) | |
111 |
|
111 | |||
112 | cm.save(full_model, name, path) |
|
112 | cm.save(full_model, name, path) | |
113 | return nb, name, path |
|
113 | return nb, name, path | |
114 |
|
114 | |||
115 | def test_create_notebook(self): |
|
115 | def test_create_notebook(self): | |
116 | cm = self.contents_manager |
|
116 | cm = self.contents_manager | |
117 | # Test in root directory |
|
117 | # Test in root directory | |
118 | model = cm.create_notebook() |
|
118 | model = cm.create_notebook() | |
119 | assert isinstance(model, dict) |
|
119 | assert isinstance(model, dict) | |
120 | self.assertIn('name', model) |
|
120 | self.assertIn('name', model) | |
121 | self.assertIn('path', model) |
|
121 | self.assertIn('path', model) | |
122 | self.assertEqual(model['name'], 'Untitled0.ipynb') |
|
122 | self.assertEqual(model['name'], 'Untitled0.ipynb') | |
123 | self.assertEqual(model['path'], '') |
|
123 | self.assertEqual(model['path'], '') | |
124 |
|
124 | |||
125 | # Test in sub-directory |
|
125 | # Test in sub-directory | |
126 | sub_dir = '/foo/' |
|
126 | sub_dir = '/foo/' | |
127 | self.make_dir(cm.root_dir, 'foo') |
|
127 | self.make_dir(cm.root_dir, 'foo') | |
128 | model = cm.create_notebook(None, sub_dir) |
|
128 | model = cm.create_notebook(None, sub_dir) | |
129 | assert isinstance(model, dict) |
|
129 | assert isinstance(model, dict) | |
130 | self.assertIn('name', model) |
|
130 | self.assertIn('name', model) | |
131 | self.assertIn('path', model) |
|
131 | self.assertIn('path', model) | |
132 | self.assertEqual(model['name'], 'Untitled0.ipynb') |
|
132 | self.assertEqual(model['name'], 'Untitled0.ipynb') | |
133 | self.assertEqual(model['path'], sub_dir.strip('/')) |
|
133 | self.assertEqual(model['path'], sub_dir.strip('/')) | |
134 |
|
134 | |||
135 | def test_get(self): |
|
135 | def test_get(self): | |
136 | cm = self.contents_manager |
|
136 | cm = self.contents_manager | |
137 | # Create a notebook |
|
137 | # Create a notebook | |
138 | model = cm.create_notebook() |
|
138 | model = cm.create_notebook() | |
139 | name = model['name'] |
|
139 | name = model['name'] | |
140 | path = model['path'] |
|
140 | path = model['path'] | |
141 |
|
141 | |||
142 | # Check that we can 'get' the notebook we just created |
|
142 | # Check that we can 'get' the notebook we just created | |
143 | model2 = cm.get(name, path) |
|
143 | model2 = cm.get_model(name, path) | |
144 | assert isinstance(model2, dict) |
|
144 | assert isinstance(model2, dict) | |
145 | self.assertIn('name', model2) |
|
145 | self.assertIn('name', model2) | |
146 | self.assertIn('path', model2) |
|
146 | self.assertIn('path', model2) | |
147 | self.assertEqual(model['name'], name) |
|
147 | self.assertEqual(model['name'], name) | |
148 | self.assertEqual(model['path'], path) |
|
148 | self.assertEqual(model['path'], path) | |
149 |
|
149 | |||
150 | # Test in sub-directory |
|
150 | # Test in sub-directory | |
151 | sub_dir = '/foo/' |
|
151 | sub_dir = '/foo/' | |
152 | self.make_dir(cm.root_dir, 'foo') |
|
152 | self.make_dir(cm.root_dir, 'foo') | |
153 | model = cm.create_notebook(None, sub_dir) |
|
153 | model = cm.create_notebook(None, sub_dir) | |
154 | model2 = cm.get(name, sub_dir) |
|
154 | model2 = cm.get_model(name, sub_dir) | |
155 | assert isinstance(model2, dict) |
|
155 | assert isinstance(model2, dict) | |
156 | self.assertIn('name', model2) |
|
156 | self.assertIn('name', model2) | |
157 | self.assertIn('path', model2) |
|
157 | self.assertIn('path', model2) | |
158 | self.assertIn('content', model2) |
|
158 | self.assertIn('content', model2) | |
159 | self.assertEqual(model2['name'], 'Untitled0.ipynb') |
|
159 | self.assertEqual(model2['name'], 'Untitled0.ipynb') | |
160 | self.assertEqual(model2['path'], sub_dir.strip('/')) |
|
160 | self.assertEqual(model2['path'], sub_dir.strip('/')) | |
161 |
|
161 | |||
162 | def test_update(self): |
|
162 | def test_update(self): | |
163 | cm = self.contents_manager |
|
163 | cm = self.contents_manager | |
164 | # Create a notebook |
|
164 | # Create a notebook | |
165 | model = cm.create_notebook() |
|
165 | model = cm.create_notebook() | |
166 | name = model['name'] |
|
166 | name = model['name'] | |
167 | path = model['path'] |
|
167 | path = model['path'] | |
168 |
|
168 | |||
169 | # Change the name in the model for rename |
|
169 | # Change the name in the model for rename | |
170 | model['name'] = 'test.ipynb' |
|
170 | model['name'] = 'test.ipynb' | |
171 | model = cm.update(model, name, path) |
|
171 | model = cm.update(model, name, path) | |
172 | assert isinstance(model, dict) |
|
172 | assert isinstance(model, dict) | |
173 | self.assertIn('name', model) |
|
173 | self.assertIn('name', model) | |
174 | self.assertIn('path', model) |
|
174 | self.assertIn('path', model) | |
175 | self.assertEqual(model['name'], 'test.ipynb') |
|
175 | self.assertEqual(model['name'], 'test.ipynb') | |
176 |
|
176 | |||
177 | # Make sure the old name is gone |
|
177 | # Make sure the old name is gone | |
178 | self.assertRaises(HTTPError, cm.get, name, path) |
|
178 | self.assertRaises(HTTPError, cm.get_model, name, path) | |
179 |
|
179 | |||
180 | # Test in sub-directory |
|
180 | # Test in sub-directory | |
181 | # Create a directory and notebook in that directory |
|
181 | # Create a directory and notebook in that directory | |
182 | sub_dir = '/foo/' |
|
182 | sub_dir = '/foo/' | |
183 | self.make_dir(cm.root_dir, 'foo') |
|
183 | self.make_dir(cm.root_dir, 'foo') | |
184 | model = cm.create_notebook(None, sub_dir) |
|
184 | model = cm.create_notebook(None, sub_dir) | |
185 | name = model['name'] |
|
185 | name = model['name'] | |
186 | path = model['path'] |
|
186 | path = model['path'] | |
187 |
|
187 | |||
188 | # Change the name in the model for rename |
|
188 | # Change the name in the model for rename | |
189 | model['name'] = 'test_in_sub.ipynb' |
|
189 | model['name'] = 'test_in_sub.ipynb' | |
190 | model = cm.update(model, name, path) |
|
190 | model = cm.update(model, name, path) | |
191 | assert isinstance(model, dict) |
|
191 | assert isinstance(model, dict) | |
192 | self.assertIn('name', model) |
|
192 | self.assertIn('name', model) | |
193 | self.assertIn('path', model) |
|
193 | self.assertIn('path', model) | |
194 | self.assertEqual(model['name'], 'test_in_sub.ipynb') |
|
194 | self.assertEqual(model['name'], 'test_in_sub.ipynb') | |
195 | self.assertEqual(model['path'], sub_dir.strip('/')) |
|
195 | self.assertEqual(model['path'], sub_dir.strip('/')) | |
196 |
|
196 | |||
197 | # Make sure the old name is gone |
|
197 | # Make sure the old name is gone | |
198 | self.assertRaises(HTTPError, cm.get, name, path) |
|
198 | self.assertRaises(HTTPError, cm.get_model, name, path) | |
199 |
|
199 | |||
200 | def test_save(self): |
|
200 | def test_save(self): | |
201 | cm = self.contents_manager |
|
201 | cm = self.contents_manager | |
202 | # Create a notebook |
|
202 | # Create a notebook | |
203 | model = cm.create_notebook() |
|
203 | model = cm.create_notebook() | |
204 | name = model['name'] |
|
204 | name = model['name'] | |
205 | path = model['path'] |
|
205 | path = model['path'] | |
206 |
|
206 | |||
207 | # Get the model with 'content' |
|
207 | # Get the model with 'content' | |
208 | full_model = cm.get(name, path) |
|
208 | full_model = cm.get_model(name, path) | |
209 |
|
209 | |||
210 | # Save the notebook |
|
210 | # Save the notebook | |
211 | model = cm.save(full_model, name, path) |
|
211 | model = cm.save(full_model, name, path) | |
212 | assert isinstance(model, dict) |
|
212 | assert isinstance(model, dict) | |
213 | self.assertIn('name', model) |
|
213 | self.assertIn('name', model) | |
214 | self.assertIn('path', model) |
|
214 | self.assertIn('path', model) | |
215 | self.assertEqual(model['name'], name) |
|
215 | self.assertEqual(model['name'], name) | |
216 | self.assertEqual(model['path'], path) |
|
216 | self.assertEqual(model['path'], path) | |
217 |
|
217 | |||
218 | # Test in sub-directory |
|
218 | # Test in sub-directory | |
219 | # Create a directory and notebook in that directory |
|
219 | # Create a directory and notebook in that directory | |
220 | sub_dir = '/foo/' |
|
220 | sub_dir = '/foo/' | |
221 | self.make_dir(cm.root_dir, 'foo') |
|
221 | self.make_dir(cm.root_dir, 'foo') | |
222 | model = cm.create_notebook(None, sub_dir) |
|
222 | model = cm.create_notebook(None, sub_dir) | |
223 | name = model['name'] |
|
223 | name = model['name'] | |
224 | path = model['path'] |
|
224 | path = model['path'] | |
225 | model = cm.get(name, path) |
|
225 | model = cm.get_model(name, path) | |
226 |
|
226 | |||
227 | # Change the name in the model for rename |
|
227 | # Change the name in the model for rename | |
228 | model = cm.save(model, name, path) |
|
228 | model = cm.save(model, name, path) | |
229 | assert isinstance(model, dict) |
|
229 | assert isinstance(model, dict) | |
230 | self.assertIn('name', model) |
|
230 | self.assertIn('name', model) | |
231 | self.assertIn('path', model) |
|
231 | self.assertIn('path', model) | |
232 | self.assertEqual(model['name'], 'Untitled0.ipynb') |
|
232 | self.assertEqual(model['name'], 'Untitled0.ipynb') | |
233 | self.assertEqual(model['path'], sub_dir.strip('/')) |
|
233 | self.assertEqual(model['path'], sub_dir.strip('/')) | |
234 |
|
234 | |||
235 | def test_delete(self): |
|
235 | def test_delete(self): | |
236 | cm = self.contents_manager |
|
236 | cm = self.contents_manager | |
237 | # Create a notebook |
|
237 | # Create a notebook | |
238 | nb, name, path = self.new_notebook() |
|
238 | nb, name, path = self.new_notebook() | |
239 |
|
239 | |||
240 | # Delete the notebook |
|
240 | # Delete the notebook | |
241 | cm.delete(name, path) |
|
241 | cm.delete(name, path) | |
242 |
|
242 | |||
243 | # Check that a 'get' on the deleted notebook raises an error |
|
243 | # Check that a 'get' on the deleted notebook raises an error | |
244 | self.assertRaises(HTTPError, cm.get, name, path) |
|
244 | self.assertRaises(HTTPError, cm.get_model, name, path) | |
245 |
|
245 | |||
246 | def test_copy(self): |
|
246 | def test_copy(self): | |
247 | cm = self.contents_manager |
|
247 | cm = self.contents_manager | |
248 | path = u'Γ₯ b' |
|
248 | path = u'Γ₯ b' | |
249 | name = u'nb β.ipynb' |
|
249 | name = u'nb β.ipynb' | |
250 | os.mkdir(os.path.join(cm.root_dir, path)) |
|
250 | os.mkdir(os.path.join(cm.root_dir, path)) | |
251 | orig = cm.create_notebook({'name' : name}, path=path) |
|
251 | orig = cm.create_notebook({'name' : name}, path=path) | |
252 |
|
252 | |||
253 | # copy with unspecified name |
|
253 | # copy with unspecified name | |
254 | copy = cm.copy(name, path=path) |
|
254 | copy = cm.copy(name, path=path) | |
255 | self.assertEqual(copy['name'], orig['name'].replace('.ipynb', '-Copy0.ipynb')) |
|
255 | self.assertEqual(copy['name'], orig['name'].replace('.ipynb', '-Copy0.ipynb')) | |
256 |
|
256 | |||
257 | # copy with specified name |
|
257 | # copy with specified name | |
258 | copy2 = cm.copy(name, u'copy 2.ipynb', path=path) |
|
258 | copy2 = cm.copy(name, u'copy 2.ipynb', path=path) | |
259 | self.assertEqual(copy2['name'], u'copy 2.ipynb') |
|
259 | self.assertEqual(copy2['name'], u'copy 2.ipynb') | |
260 |
|
260 | |||
261 | def test_trust_notebook(self): |
|
261 | def test_trust_notebook(self): | |
262 | cm = self.contents_manager |
|
262 | cm = self.contents_manager | |
263 | nb, name, path = self.new_notebook() |
|
263 | nb, name, path = self.new_notebook() | |
264 |
|
264 | |||
265 | untrusted = cm.get(name, path)['content'] |
|
265 | untrusted = cm.get_model(name, path)['content'] | |
266 | assert not cm.notary.check_cells(untrusted) |
|
266 | assert not cm.notary.check_cells(untrusted) | |
267 |
|
267 | |||
268 | # print(untrusted) |
|
268 | # print(untrusted) | |
269 | cm.trust_notebook(name, path) |
|
269 | cm.trust_notebook(name, path) | |
270 | trusted = cm.get(name, path)['content'] |
|
270 | trusted = cm.get_model(name, path)['content'] | |
271 | # print(trusted) |
|
271 | # print(trusted) | |
272 | assert cm.notary.check_cells(trusted) |
|
272 | assert cm.notary.check_cells(trusted) | |
273 |
|
273 | |||
274 | def test_mark_trusted_cells(self): |
|
274 | def test_mark_trusted_cells(self): | |
275 | cm = self.contents_manager |
|
275 | cm = self.contents_manager | |
276 | nb, name, path = self.new_notebook() |
|
276 | nb, name, path = self.new_notebook() | |
277 |
|
277 | |||
278 | cm.mark_trusted_cells(nb, name, path) |
|
278 | cm.mark_trusted_cells(nb, name, path) | |
279 | for cell in nb.worksheets[0].cells: |
|
279 | for cell in nb.worksheets[0].cells: | |
280 | if cell.cell_type == 'code': |
|
280 | if cell.cell_type == 'code': | |
281 | assert not cell.trusted |
|
281 | assert not cell.trusted | |
282 |
|
282 | |||
283 | cm.trust_notebook(name, path) |
|
283 | cm.trust_notebook(name, path) | |
284 | nb = cm.get(name, path)['content'] |
|
284 | nb = cm.get_model(name, path)['content'] | |
285 | for cell in nb.worksheets[0].cells: |
|
285 | for cell in nb.worksheets[0].cells: | |
286 | if cell.cell_type == 'code': |
|
286 | if cell.cell_type == 'code': | |
287 | assert cell.trusted |
|
287 | assert cell.trusted | |
288 |
|
288 | |||
289 | def test_check_and_sign(self): |
|
289 | def test_check_and_sign(self): | |
290 | cm = self.contents_manager |
|
290 | cm = self.contents_manager | |
291 | nb, name, path = self.new_notebook() |
|
291 | nb, name, path = self.new_notebook() | |
292 |
|
292 | |||
293 | cm.mark_trusted_cells(nb, name, path) |
|
293 | cm.mark_trusted_cells(nb, name, path) | |
294 | cm.check_and_sign(nb, name, path) |
|
294 | cm.check_and_sign(nb, name, path) | |
295 | assert not cm.notary.check_signature(nb) |
|
295 | assert not cm.notary.check_signature(nb) | |
296 |
|
296 | |||
297 | cm.trust_notebook(name, path) |
|
297 | cm.trust_notebook(name, path) | |
298 | nb = cm.get(name, path)['content'] |
|
298 | nb = cm.get_model(name, path)['content'] | |
299 | cm.mark_trusted_cells(nb, name, path) |
|
299 | cm.mark_trusted_cells(nb, name, path) | |
300 | cm.check_and_sign(nb, name, path) |
|
300 | cm.check_and_sign(nb, name, path) | |
301 | assert cm.notary.check_signature(nb) |
|
301 | assert cm.notary.check_signature(nb) |
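One detail of TestFileContentsManager above worth calling out is where checkpoints land on disk: next to the notebook, inside the manager's checkpoint_dir. A small sketch of that expectation, not part of this changeset, with the subdirectory name chosen arbitrarily:

    import os
    from IPython.utils.tempdir import TemporaryDirectory
    from IPython.html.services.contents.filemanager import FileContentsManager  # import path assumed

    with TemporaryDirectory() as td:
        os.mkdir(os.path.join(td, 'sub'))                      # the subdirectory must already exist
        fm = FileContentsManager(root_dir=td)
        cp = fm.get_checkpoint_path('cp', 'test.ipynb', '/sub/')
        # checkpoint 'cp' of sub/test.ipynb lives at <root>/sub/<checkpoint_dir>/test-cp.ipynb
        assert cp == os.path.join(td, 'sub', fm.checkpoint_dir, 'test-cp.ipynb')

This is exactly the layout test_checkpoint_subdir asserts for its non-ASCII subdirectory.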
@@ -1,448 +1,449 b'' | |||||
1 | // Copyright (c) IPython Development Team. |
|
1 | // Copyright (c) IPython Development Team. | |
2 | // Distributed under the terms of the Modified BSD License. |
|
2 | // Distributed under the terms of the Modified BSD License. | |
3 |
|
3 | |||
4 | define([ |
|
4 | define([ | |
5 | 'base/js/namespace', |
|
5 | 'base/js/namespace', | |
6 | 'jquery', |
|
6 | 'jquery', | |
7 | 'base/js/utils', |
|
7 | 'base/js/utils', | |
8 | 'base/js/dialog', |
|
8 | 'base/js/dialog', | |
9 | ], function(IPython, $, utils, dialog) { |
|
9 | ], function(IPython, $, utils, dialog) { | |
10 | "use strict"; |
|
10 | "use strict"; | |
11 |
|
11 | |||
12 | var NotebookList = function (selector, options) { |
|
12 | var NotebookList = function (selector, options) { | |
13 | // Constructor |
|
13 | // Constructor | |
14 | // |
|
14 | // | |
15 | // Parameters: |
|
15 | // Parameters: | |
16 | // selector: string |
|
16 | // selector: string | |
17 | // options: dictionary |
|
17 | // options: dictionary | |
18 | // Dictionary of keyword arguments. |
|
18 | // Dictionary of keyword arguments. | |
19 | // session_list: SessionList instance |
|
19 | // session_list: SessionList instance | |
20 | // element_name: string |
|
20 | // element_name: string | |
21 | // base_url: string |
|
21 | // base_url: string | |
22 | // notebook_path: string |
|
22 | // notebook_path: string | |
23 | var that = this; |
|
23 | var that = this; | |
24 | this.session_list = options.session_list; |
|
24 | this.session_list = options.session_list; | |
25 | // allow code re-use by just changing element_name in kernellist.js |
|
25 | // allow code re-use by just changing element_name in kernellist.js | |
26 | this.element_name = options.element_name || 'notebook'; |
|
26 | this.element_name = options.element_name || 'notebook'; | |
27 | this.selector = selector; |
|
27 | this.selector = selector; | |
28 | if (this.selector !== undefined) { |
|
28 | if (this.selector !== undefined) { | |
29 | this.element = $(selector); |
|
29 | this.element = $(selector); | |
30 | this.style(); |
|
30 | this.style(); | |
31 | this.bind_events(); |
|
31 | this.bind_events(); | |
32 | } |
|
32 | } | |
33 | this.notebooks_list = []; |
|
33 | this.notebooks_list = []; | |
34 | this.sessions = {}; |
|
34 | this.sessions = {}; | |
35 | this.base_url = options.base_url || utils.get_body_data("baseUrl"); |
|
35 | this.base_url = options.base_url || utils.get_body_data("baseUrl"); | |
36 | this.notebook_path = options.notebook_path || utils.get_body_data("notebookPath"); |
|
36 | this.notebook_path = options.notebook_path || utils.get_body_data("notebookPath"); | |
37 | if (this.session_list && this.session_list.events) { |
|
37 | if (this.session_list && this.session_list.events) { | |
38 | this.session_list.events.on('sessions_loaded.Dashboard', |
|
38 | this.session_list.events.on('sessions_loaded.Dashboard', | |
39 | function(e, d) { that.sessions_loaded(d); }); |
|
39 | function(e, d) { that.sessions_loaded(d); }); | |
40 | } |
|
40 | } | |
41 | }; |
|
41 | }; | |
42 |
|
42 | |||
43 | NotebookList.prototype.style = function () { |
|
43 | NotebookList.prototype.style = function () { | |
44 | var prefix = '#' + this.element_name; |
|
44 | var prefix = '#' + this.element_name; | |
45 | $(prefix + '_toolbar').addClass('list_toolbar'); |
|
45 | $(prefix + '_toolbar').addClass('list_toolbar'); | |
46 | $(prefix + '_list_info').addClass('toolbar_info'); |
|
46 | $(prefix + '_list_info').addClass('toolbar_info'); | |
47 | $(prefix + '_buttons').addClass('toolbar_buttons'); |
|
47 | $(prefix + '_buttons').addClass('toolbar_buttons'); | |
48 | $(prefix + '_list_header').addClass('list_header'); |
|
48 | $(prefix + '_list_header').addClass('list_header'); | |
49 | this.element.addClass("list_container"); |
|
49 | this.element.addClass("list_container"); | |
50 | }; |
|
50 | }; | |
51 |
|
51 | |||
52 |
|
52 | |||
53 | NotebookList.prototype.bind_events = function () { |
|
53 | NotebookList.prototype.bind_events = function () { | |
54 | var that = this; |
|
54 | var that = this; | |
55 | $('#refresh_' + this.element_name + '_list').click(function () { |
|
55 | $('#refresh_' + this.element_name + '_list').click(function () { | |
56 | that.load_sessions(); |
|
56 | that.load_sessions(); | |
57 | }); |
|
57 | }); | |
58 | this.element.bind('dragover', function () { |
|
58 | this.element.bind('dragover', function () { | |
59 | return false; |
|
59 | return false; | |
60 | }); |
|
60 | }); | |
61 | this.element.bind('drop', function(event){ |
|
61 | this.element.bind('drop', function(event){ | |
62 | that.handleFilesUpload(event,'drop'); |
|
62 | that.handleFilesUpload(event,'drop'); | |
63 | return false; |
|
63 | return false; | |
64 | }); |
|
64 | }); | |
65 | }; |
|
65 | }; | |
66 |
|
66 | |||
67 | NotebookList.prototype.handleFilesUpload = function(event, dropOrForm) { |
|
67 | NotebookList.prototype.handleFilesUpload = function(event, dropOrForm) { | |
68 | var that = this; |
|
68 | var that = this; | |
69 | var files; |
|
69 | var files; | |
70 | if(dropOrForm =='drop'){ |
|
70 | if(dropOrForm =='drop'){ | |
71 | files = event.originalEvent.dataTransfer.files; |
|
71 | files = event.originalEvent.dataTransfer.files; | |
72 | } else |
|
72 | } else | |
73 | { |
|
73 | { | |
74 | files = event.originalEvent.target.files; |
|
74 | files = event.originalEvent.target.files; | |
75 | } |
|
75 | } | |
76 | for (var i = 0; i < files.length; i++) { |
|
76 | for (var i = 0; i < files.length; i++) { | |
77 | var f = files[i]; |
|
77 | var f = files[i]; | |
78 | var reader = new FileReader(); |
|
78 | var reader = new FileReader(); | |
79 | reader.readAsText(f); |
|
79 | reader.readAsText(f); | |
80 | var name_and_ext = utils.splitext(f.name); |
|
80 | var name_and_ext = utils.splitext(f.name); | |
81 | var file_ext = name_and_ext[1]; |
|
81 | var file_ext = name_and_ext[1]; | |
82 | if (file_ext === '.ipynb') { |
|
82 | if (file_ext === '.ipynb') { | |
83 | var item = that.new_notebook_item(0); |
|
83 | var item = that.new_notebook_item(0); | |
84 | item.addClass('new-file'); |
|
84 | item.addClass('new-file'); | |
85 | that.add_name_input(f.name, item); |
|
85 | that.add_name_input(f.name, item); | |
86 | // Store the notebook item in the reader so we can use it later |
|
86 | // Store the notebook item in the reader so we can use it later | |
87 | // to know which item it belongs to. |
|
87 | // to know which item it belongs to. | |
88 | $(reader).data('item', item); |
|
88 | $(reader).data('item', item); | |
89 | reader.onload = function (event) { |
|
89 | reader.onload = function (event) { | |
90 | var nbitem = $(event.target).data('item'); |
|
90 | var nbitem = $(event.target).data('item'); | |
91 | that.add_notebook_data(event.target.result, nbitem); |
|
91 | that.add_notebook_data(event.target.result, nbitem); | |
92 | that.add_upload_button(nbitem); |
|
92 | that.add_upload_button(nbitem); | |
93 | }; |
|
93 | }; | |
94 | } else { |
|
94 | } else { | |
95 | var dialog_body = 'Uploaded notebooks must be .ipynb files'; |
|
95 | var dialog_body = 'Uploaded notebooks must be .ipynb files'; | |
96 | dialog.modal({ |
|
96 | dialog.modal({ | |
97 | title : 'Invalid file type', |
|
97 | title : 'Invalid file type', | |
98 | body : dialog_body, |
|
98 | body : dialog_body, | |
99 | buttons : {'OK' : {'class' : 'btn-primary'}} |
|
99 | buttons : {'OK' : {'class' : 'btn-primary'}} | |
100 | }); |
|
100 | }); | |
101 | } |
|
101 | } | |
102 | } |
|
102 | } | |
103 | // Replace the file input form with a clone of itself. This is required to |
|
103 | // Replace the file input form with a clone of itself. This is required to | |
104 | // reset the form. Otherwise, if you upload a file, delete it and try to |
|
104 | // reset the form. Otherwise, if you upload a file, delete it and try to | |
105 | // upload it again, the changed event won't fire. |
|
105 | // upload it again, the changed event won't fire. | |
106 | var form = $('input.fileinput'); |
|
106 | var form = $('input.fileinput'); | |
107 | form.replaceWith(form.clone(true)); |
|
107 | form.replaceWith(form.clone(true)); | |
108 | return false; |
|
108 | return false; | |
109 | }; |
|
109 | }; | |
110 |
|
110 | |||
111 | NotebookList.prototype.clear_list = function (remove_uploads) { |
|
111 | NotebookList.prototype.clear_list = function (remove_uploads) { | |
112 | // Clears the navigation tree. |
|
112 | // Clears the navigation tree. | |
113 | // |
|
113 | // | |
114 | // Parameters |
|
114 | // Parameters | |
115 | // remove_uploads: bool=False |
|
115 | // remove_uploads: bool=False | |
116 | // Should upload prompts also be removed from the tree. |
|
116 | // Should upload prompts also be removed from the tree. | |
117 | if (remove_uploads) { |
|
117 | if (remove_uploads) { | |
118 | this.element.children('.list_item').remove(); |
|
118 | this.element.children('.list_item').remove(); | |
119 | } else { |
|
119 | } else { | |
120 | this.element.children('.list_item:not(.new-file)').remove(); |
|
120 | this.element.children('.list_item:not(.new-file)').remove(); | |
121 | } |
|
121 | } | |
122 | }; |
|
122 | }; | |
123 |
|
123 | |||
124 | NotebookList.prototype.load_sessions = function(){ |
|
124 | NotebookList.prototype.load_sessions = function(){ | |
125 | this.session_list.load_sessions(); |
|
125 | this.session_list.load_sessions(); | |
126 | }; |
|
126 | }; | |
127 |
|
127 | |||
128 |
|
128 | |||
129 | NotebookList.prototype.sessions_loaded = function(data){ |
|
129 | NotebookList.prototype.sessions_loaded = function(data){ | |
130 | this.sessions = data; |
|
130 | this.sessions = data; | |
131 | this.load_list(); |
|
131 | this.load_list(); | |
132 | }; |
|
132 | }; | |
133 |
|
133 | |||
134 | NotebookList.prototype.load_list = function () { |
|
134 | NotebookList.prototype.load_list = function () { | |
135 | var that = this; |
|
135 | var that = this; | |
136 | var settings = { |
|
136 | var settings = { | |
137 | processData : false, |
|
137 | processData : false, | |
138 | cache : false, |
|
138 | cache : false, | |
139 | type : "GET", |
|
139 | type : "GET", | |
140 | dataType : "json", |
|
140 | dataType : "json", | |
141 | success : $.proxy(this.list_loaded, this), |
|
141 | success : $.proxy(this.list_loaded, this), | |
142 | error : $.proxy( function(xhr, status, error){ |
|
142 | error : $.proxy( function(xhr, status, error){ | |
143 | utils.log_ajax_error(xhr, status, error); |
|
143 | utils.log_ajax_error(xhr, status, error); | |
144 | that.list_loaded([], null, null, {msg:"Error connecting to server."}); |
|
144 | that.list_loaded([], null, null, {msg:"Error connecting to server."}); | |
145 | },this) |
|
145 | },this) | |
146 | }; |
|
146 | }; | |
147 |
|
147 | |||
148 | var url = utils.url_join_encode( |
|
148 | var url = utils.url_join_encode( | |
149 | this.base_url, |
|
149 | this.base_url, | |
150 | 'api', |
|
150 | 'api', | |
151 | 'contents', |
|
151 | 'contents', | |
152 | this.notebook_path |
|
152 | this.notebook_path | |
153 | ); |
|
153 | ); | |
154 | $.ajax(url, settings); |
|
154 | $.ajax(url, settings); | |
155 | }; |
|
155 | }; | |
156 |
|
156 | |||
157 |
|
157 | |||
158 | NotebookList.prototype.list_loaded = function (data, status, xhr, param) { |
|
158 | NotebookList.prototype.list_loaded = function (data, status, xhr, param) { | |
159 | var message = 'Notebook list empty.'; |
|
159 | var message = 'Notebook list empty.'; | |
160 | if (param !== undefined && param.msg) { |
|
160 | if (param !== undefined && param.msg) { | |
161 | message = param.msg; |
|
161 | message = param.msg; | |
162 | } |
|
162 | } | |
163 | var item = null; |
|
163 | var item = null; | |
164 | var len = data.length; |
|
164 | var content = data.content; | |
|
165 | var len = content.length; | |||
165 | this.clear_list(); |
|
166 | this.clear_list(); | |
166 | if (len === 0) { |
|
167 | if (len === 0) { | |
167 | item = this.new_notebook_item(0); |
|
168 | item = this.new_notebook_item(0); | |
168 | var span12 = item.children().first(); |
|
169 | var span12 = item.children().first(); | |
169 | span12.empty(); |
|
170 | span12.empty(); | |
170 | span12.append($('<div style="margin:auto;text-align:center;color:grey"/>').text(message)); |
|
171 | span12.append($('<div style="margin:auto;text-align:center;color:grey"/>').text(message)); | |
171 | } |
|
172 | } | |
172 | var path = this.notebook_path; |
|
173 | var path = this.notebook_path; | |
173 | var offset = 0; |
|
174 | var offset = 0; | |
174 | if (path !== '') { |
|
175 | if (path !== '') { | |
175 | item = this.new_notebook_item(0); |
|
176 | item = this.new_notebook_item(0); | |
176 | this.add_dir(path, '..', item); |
|
177 | this.add_dir(path, '..', item); | |
177 | offset = 1; |
|
178 | offset = 1; | |
178 | } |
|
179 | } | |
179 | for (var i=0; i<len; i++) { |
|
180 | for (var i=0; i<len; i++) { | |
180 | if (data[i].type === 'directory') { |
|
181 | if (content[i].type === 'directory') { | |
181 | var name = data[i].name; |
|
182 | var name = content[i].name; | |
182 | item = this.new_notebook_item(i+offset); |
|
183 | item = this.new_notebook_item(i+offset); | |
183 | this.add_dir(path, name, item); |
|
184 | this.add_dir(path, name, item); | |
184 | } else { |
|
185 | } else { | |
185 | var name = data[i].name; |
|
186 | var name = content[i].name; | |
186 | item = this.new_notebook_item(i+offset); |
|
187 | item = this.new_notebook_item(i+offset); | |
187 | this.add_link(path, name, item); |
|
188 | this.add_link(path, name, item); | |
188 | name = utils.url_path_join(path, name); |
|
189 | name = utils.url_path_join(path, name); | |
189 | if(this.sessions[name] === undefined){ |
|
190 | if(this.sessions[name] === undefined){ | |
190 | this.add_delete_button(item); |
|
191 | this.add_delete_button(item); | |
191 | } else { |
|
192 | } else { | |
192 | this.add_shutdown_button(item,this.sessions[name]); |
|
193 | this.add_shutdown_button(item,this.sessions[name]); | |
193 | } |
|
194 | } | |
194 | } |
|
195 | } | |
195 | } |
|
196 | } | |
196 | }; |
|
197 | }; | |
197 |
|
198 | |||
198 |
|
199 | |||
199 | NotebookList.prototype.new_notebook_item = function (index) { |
|
200 | NotebookList.prototype.new_notebook_item = function (index) { | |
200 | var item = $('<div/>').addClass("list_item").addClass("row"); |
|
201 | var item = $('<div/>').addClass("list_item").addClass("row"); | |
201 | // item.addClass('list_item ui-widget ui-widget-content ui-helper-clearfix'); |
|
202 | // item.addClass('list_item ui-widget ui-widget-content ui-helper-clearfix'); | |
202 | // item.css('border-top-style','none'); |
|
203 | // item.css('border-top-style','none'); | |
203 | item.append($("<div/>").addClass("col-md-12").append( |
|
204 | item.append($("<div/>").addClass("col-md-12").append( | |
204 | $('<i/>').addClass('item_icon') |
|
205 | $('<i/>').addClass('item_icon') | |
205 | ).append( |
|
206 | ).append( | |
206 | $("<a/>").addClass("item_link").append( |
|
207 | $("<a/>").addClass("item_link").append( | |
207 | $("<span/>").addClass("item_name") |
|
208 | $("<span/>").addClass("item_name") | |
208 | ) |
|
209 | ) | |
209 | ).append( |
|
210 | ).append( | |
210 | $('<div/>').addClass("item_buttons btn-group pull-right") |
|
211 | $('<div/>').addClass("item_buttons btn-group pull-right") | |
211 | )); |
|
212 | )); | |
212 |
|
213 | |||
213 | if (index === -1) { |
|
214 | if (index === -1) { | |
214 | this.element.append(item); |
|
215 | this.element.append(item); | |
215 | } else { |
|
216 | } else { | |
216 | this.element.children().eq(index).after(item); |
|
217 | this.element.children().eq(index).after(item); | |
217 | } |
|
218 | } | |
218 | return item; |
|
219 | return item; | |
219 | }; |
|
220 | }; | |
220 |
|
221 | |||
221 |
|
222 | |||
222 | NotebookList.prototype.add_dir = function (path, name, item) { |
|
223 | NotebookList.prototype.add_dir = function (path, name, item) { | |
223 | item.data('name', name); |
|
224 | item.data('name', name); | |
224 | item.data('path', path); |
|
225 | item.data('path', path); | |
225 | item.find(".item_name").text(name); |
|
226 | item.find(".item_name").text(name); | |
226 | item.find(".item_icon").addClass('folder_icon').addClass('icon-fixed-width'); |
|
227 | item.find(".item_icon").addClass('folder_icon').addClass('icon-fixed-width'); | |
227 | item.find("a.item_link") |
|
228 | item.find("a.item_link") | |
228 | .attr('href', |
|
229 | .attr('href', | |
229 | utils.url_join_encode( |
|
230 | utils.url_join_encode( | |
230 | this.base_url, |
|
231 | this.base_url, | |
231 | "tree", |
|
232 | "tree", | |
232 | path, |
|
233 | path, | |
233 | name |
|
234 | name | |
234 | ) |
|
235 | ) | |
235 | ); |
|
236 | ); | |
236 | }; |
|
237 | }; | |
237 |
|
238 | |||
238 |
|
239 | |||
239 | NotebookList.prototype.add_link = function (path, nbname, item) { |
|
240 | NotebookList.prototype.add_link = function (path, nbname, item) { | |
240 | item.data('nbname', nbname); |
|
241 | item.data('nbname', nbname); | |
241 | item.data('path', path); |
|
242 | item.data('path', path); | |
242 | item.find(".item_name").text(nbname); |
|
243 | item.find(".item_name").text(nbname); | |
243 | item.find(".item_icon").addClass('notebook_icon').addClass('icon-fixed-width'); |
|
244 | item.find(".item_icon").addClass('notebook_icon').addClass('icon-fixed-width'); | |
244 | item.find("a.item_link") |
|
245 | item.find("a.item_link") | |
245 | .attr('href', |
|
246 | .attr('href', | |
246 | utils.url_join_encode( |
|
247 | utils.url_join_encode( | |
247 | this.base_url, |
|
248 | this.base_url, | |
248 | "notebooks", |
|
249 | "notebooks", | |
249 | path, |
|
250 | path, | |
250 | nbname |
|
251 | nbname | |
251 | ) |
|
252 | ) | |
252 | ).attr('target','_blank'); |
|
253 | ).attr('target','_blank'); | |
253 | }; |
|
254 | }; | |
254 |
|
255 | |||
255 |
|
256 | |||
256 | NotebookList.prototype.add_name_input = function (nbname, item) { |
|
257 | NotebookList.prototype.add_name_input = function (nbname, item) { | |
257 | item.data('nbname', nbname); |
|
258 | item.data('nbname', nbname); | |
258 | item.find(".item_icon").addClass('notebook_icon').addClass('icon-fixed-width'); |
|
259 | item.find(".item_icon").addClass('notebook_icon').addClass('icon-fixed-width'); | |
259 | item.find(".item_name").empty().append( |
|
260 | item.find(".item_name").empty().append( | |
260 | $('<input/>') |
|
261 | $('<input/>') | |
261 | .addClass("nbname_input") |
|
262 | .addClass("nbname_input") | |
262 | .attr('value', utils.splitext(nbname)[0]) |
|
263 | .attr('value', utils.splitext(nbname)[0]) | |
263 | .attr('size', '30') |
|
264 | .attr('size', '30') | |
264 | .attr('type', 'text') |
|
265 | .attr('type', 'text') | |
265 | ); |
|
266 | ); | |
266 | }; |
|
267 | }; | |
267 |
|
268 | |||
268 |
|
269 | |||
269 | NotebookList.prototype.add_notebook_data = function (data, item) { |
|
270 | NotebookList.prototype.add_notebook_data = function (data, item) { | |
270 | item.data('nbdata', data); |
|
271 | item.data('nbdata', data); | |
271 | }; |
|
272 | }; | |
272 |
|
273 | |||
273 |
|
274 | |||
274 | NotebookList.prototype.add_shutdown_button = function (item, session) { |
|
275 | NotebookList.prototype.add_shutdown_button = function (item, session) { | |
275 | var that = this; |
|
276 | var that = this; | |
276 | var shutdown_button = $("<button/>").text("Shutdown").addClass("btn btn-xs btn-danger"). |
|
277 | var shutdown_button = $("<button/>").text("Shutdown").addClass("btn btn-xs btn-danger"). | |
277 | click(function (e) { |
|
278 | click(function (e) { | |
278 | var settings = { |
|
279 | var settings = { | |
279 | processData : false, |
|
280 | processData : false, | |
280 | cache : false, |
|
281 | cache : false, | |
281 | type : "DELETE", |
|
282 | type : "DELETE", | |
282 | dataType : "json", |
|
283 | dataType : "json", | |
283 | success : function () { |
|
284 | success : function () { | |
284 | that.load_sessions(); |
|
285 | that.load_sessions(); | |
285 | }, |
|
286 | }, | |
286 | error : utils.log_ajax_error, |
|
287 | error : utils.log_ajax_error, | |
287 | }; |
|
288 | }; | |
288 | var url = utils.url_join_encode( |
|
289 | var url = utils.url_join_encode( | |
289 | that.base_url, |
|
290 | that.base_url, | |
290 | 'api/sessions', |
|
291 | 'api/sessions', | |
291 | session |
|
292 | session | |
292 | ); |
|
293 | ); | |
293 | $.ajax(url, settings); |
|
294 | $.ajax(url, settings); | |
294 | return false; |
|
295 | return false; | |
295 | }); |
|
296 | }); | |
296 | // var new_buttons = item.find('a'); // shutdown_button; |
|
297 | // var new_buttons = item.find('a'); // shutdown_button; | |
297 | item.find(".item_buttons").text("").append(shutdown_button); |
|
298 | item.find(".item_buttons").text("").append(shutdown_button); | |
298 | }; |
|
299 | }; | |
299 |
|
300 | |||
300 | NotebookList.prototype.add_delete_button = function (item) { |
|
301 | NotebookList.prototype.add_delete_button = function (item) { | |
301 | var new_buttons = $('<span/>').addClass("btn-group pull-right"); |
|
302 | var new_buttons = $('<span/>').addClass("btn-group pull-right"); | |
302 | var notebooklist = this; |
|
303 | var notebooklist = this; | |
303 | var delete_button = $("<button/>").text("Delete").addClass("btn btn-default btn-xs"). |
|
304 | var delete_button = $("<button/>").text("Delete").addClass("btn btn-default btn-xs"). | |
304 | click(function (e) { |
|
305 | click(function (e) { | |
305 | // $(this) is the button that was clicked. |
|
306 | // $(this) is the button that was clicked. | |
306 | var that = $(this); |
|
307 | var that = $(this); | |
307 | // We use the nbname and notebook_id from the parent notebook_item element's |
|
308 | // We use the nbname and notebook_id from the parent notebook_item element's | |
308 | // data because the outer scopes values change as we iterate through the loop. |
|
309 | // data because the outer scopes values change as we iterate through the loop. | |
309 | var parent_item = that.parents('div.list_item'); |
|
310 | var parent_item = that.parents('div.list_item'); | |
310 | var nbname = parent_item.data('nbname'); |
|
311 | var nbname = parent_item.data('nbname'); | |
311 | var message = 'Are you sure you want to permanently delete the notebook: ' + nbname + '?'; |
|
312 | var message = 'Are you sure you want to permanently delete the notebook: ' + nbname + '?'; | |
312 | dialog.modal({ |
|
313 | dialog.modal({ | |
313 | title : "Delete notebook", |
|
314 | title : "Delete notebook", | |
314 | body : message, |
|
315 | body : message, | |
315 | buttons : { |
|
316 | buttons : { | |
316 | Delete : { |
|
317 | Delete : { | |
317 | class: "btn-danger", |
|
318 | class: "btn-danger", | |
318 | click: function() { |
|
319 | click: function() { | |
319 | var settings = { |
|
320 | var settings = { | |
320 | processData : false, |
|
321 | processData : false, | |
321 | cache : false, |
|
322 | cache : false, | |
322 | type : "DELETE", |
|
323 | type : "DELETE", | |
323 | dataType : "json", |
|
324 | dataType : "json", | |
324 | success : function (data, status, xhr) { |
|
325 | success : function (data, status, xhr) { | |
325 | parent_item.remove(); |
|
326 | parent_item.remove(); | |
326 | }, |
|
327 | }, | |
327 | error : utils.log_ajax_error, |
|
328 | error : utils.log_ajax_error, | |
328 | }; |
|
329 | }; | |
329 | var url = utils.url_join_encode( |
|
330 | var url = utils.url_join_encode( | |
330 | notebooklist.base_url, |
|
331 | notebooklist.base_url, | |
331 | 'api/contents', |
|
332 | 'api/contents', | |
332 | notebooklist.notebook_path, |
|
333 | notebooklist.notebook_path, | |
333 | nbname |
|
334 | nbname | |
334 | ); |
|
335 | ); | |
335 | $.ajax(url, settings); |
|
336 | $.ajax(url, settings); | |
336 | } |
|
337 | } | |
337 | }, |
|
338 | }, | |
338 | Cancel : {} |
|
339 | Cancel : {} | |
339 | } |
|
340 | } | |
340 | }); |
|
341 | }); | |
341 | return false; |
|
342 | return false; | |
342 | }); |
|
343 | }); | |
343 | item.find(".item_buttons").text("").append(delete_button); |
|
344 | item.find(".item_buttons").text("").append(delete_button); | |
344 | }; |
|
345 | }; | |
345 |
|
346 | |||
346 |
|
347 | |||
347 | NotebookList.prototype.add_upload_button = function (item) { |
|
348 | NotebookList.prototype.add_upload_button = function (item) { | |
348 | var that = this; |
|
349 | var that = this; | |
349 | var upload_button = $('<button/>').text("Upload") |
|
350 | var upload_button = $('<button/>').text("Upload") | |
350 | .addClass('btn btn-primary btn-xs upload_button') |
|
351 | .addClass('btn btn-primary btn-xs upload_button') | |
351 | .click(function (e) { |
|
352 | .click(function (e) { | |
352 | var nbname = item.find('.item_name > input').val(); |
|
353 | var nbname = item.find('.item_name > input').val(); | |
353 | if (nbname.slice(nbname.length-6, nbname.length) != ".ipynb") { |
|
354 | if (nbname.slice(nbname.length-6, nbname.length) != ".ipynb") { | |
354 | nbname = nbname + ".ipynb"; |
|
355 | nbname = nbname + ".ipynb"; | |
355 | } |
|
356 | } | |
356 | var path = that.notebook_path; |
|
357 | var path = that.notebook_path; | |
357 | var nbdata = item.data('nbdata'); |
|
358 | var nbdata = item.data('nbdata'); | |
358 | var content_type = 'application/json'; |
|
359 | var content_type = 'application/json'; | |
359 | var model = { |
|
360 | var model = { | |
360 | content : JSON.parse(nbdata), |
|
361 | content : JSON.parse(nbdata), | |
361 | }; |
|
362 | }; | |
362 | var settings = { |
|
363 | var settings = { | |
363 | processData : false, |
|
364 | processData : false, | |
364 | cache : false, |
|
365 | cache : false, | |
365 | type : 'PUT', |
|
366 | type : 'PUT', | |
366 | dataType : 'json', |
|
367 | dataType : 'json', | |
367 | data : JSON.stringify(model), |
|
368 | data : JSON.stringify(model), | |
368 | headers : {'Content-Type': content_type}, |
|
369 | headers : {'Content-Type': content_type}, | |
369 | success : function (data, status, xhr) { |
|
370 | success : function (data, status, xhr) { | |
370 | that.add_link(path, nbname, item); |
|
371 | that.add_link(path, nbname, item); | |
371 | that.add_delete_button(item); |
|
372 | that.add_delete_button(item); | |
372 | }, |
|
373 | }, | |
373 | error : utils.log_ajax_error, |
|
374 | error : utils.log_ajax_error, | |
374 | }; |
|
375 | }; | |
375 |
|
376 | |||
376 | var url = utils.url_join_encode( |
|
377 | var url = utils.url_join_encode( | |
377 | that.base_url, |
|
378 | that.base_url, | |
378 | 'api/contents', |
|
379 | 'api/contents', | |
379 | that.notebook_path, |
|
380 | that.notebook_path, | |
380 | nbname |
|
381 | nbname | |
381 | ); |
|
382 | ); | |
382 | $.ajax(url, settings); |
|
383 | $.ajax(url, settings); | |
383 | return false; |
|
384 | return false; | |
384 | }); |
|
385 | }); | |
385 | var cancel_button = $('<button/>').text("Cancel") |
|
386 | var cancel_button = $('<button/>').text("Cancel") | |
386 | .addClass("btn btn-default btn-xs") |
|
387 | .addClass("btn btn-default btn-xs") | |
387 | .click(function (e) { |
|
388 | .click(function (e) { | |
388 | console.log('cancel click'); |
|
389 | console.log('cancel click'); | |
389 | item.remove(); |
|
390 | item.remove(); | |
390 | return false; |
|
391 | return false; | |
391 | }); |
|
392 | }); | |
392 | item.find(".item_buttons").empty() |
|
393 | item.find(".item_buttons").empty() | |
393 | .append(upload_button) |
|
394 | .append(upload_button) | |
394 | .append(cancel_button); |
|
395 | .append(cancel_button); | |
395 | }; |
|
396 | }; | |
396 |
|
397 | |||
397 |
|
398 | |||
398 | NotebookList.prototype.new_notebook = function(){ |
|
399 | NotebookList.prototype.new_notebook = function(){ | |
399 | var path = this.notebook_path; |
|
400 | var path = this.notebook_path; | |
400 | var base_url = this.base_url; |
|
401 | var base_url = this.base_url; | |
401 | var settings = { |
|
402 | var settings = { | |
402 | processData : false, |
|
403 | processData : false, | |
403 | cache : false, |
|
404 | cache : false, | |
404 | type : "POST", |
|
405 | type : "POST", | |
405 | dataType : "json", |
|
406 | dataType : "json", | |
406 | async : false, |
|
407 | async : false, | |
407 | success : function (data, status, xhr) { |
|
408 | success : function (data, status, xhr) { | |
408 | var notebook_name = data.name; |
|
409 | var notebook_name = data.name; | |
409 | window.open( |
|
410 | window.open( | |
410 | utils.url_join_encode( |
|
411 | utils.url_join_encode( | |
411 | base_url, |
|
412 | base_url, | |
412 | 'notebooks', |
|
413 | 'notebooks', | |
413 | path, |
|
414 | path, | |
414 | notebook_name), |
|
415 | notebook_name), | |
415 | '_blank' |
|
416 | '_blank' | |
416 | ); |
|
417 | ); | |
417 | }, |
|
418 | }, | |
418 | error : $.proxy(this.new_notebook_failed, this), |
|
419 | error : $.proxy(this.new_notebook_failed, this), | |
419 | }; |
|
420 | }; | |
420 | var url = utils.url_join_encode( |
|
421 | var url = utils.url_join_encode( | |
421 | base_url, |
|
422 | base_url, | |
422 | 'api/contents', |
|
423 | 'api/contents', | |
423 | path |
|
424 | path | |
424 | ); |
|
425 | ); | |
425 | $.ajax(url, settings); |
|
426 | $.ajax(url, settings); | |
426 | }; |
|
427 | }; | |
427 |
|
428 | |||
428 |
|
429 | |||
429 | NotebookList.prototype.new_notebook_failed = function (xhr, status, error) { |
|
430 | NotebookList.prototype.new_notebook_failed = function (xhr, status, error) { | |
430 | utils.log_ajax_error(xhr, status, error); |
|
431 | utils.log_ajax_error(xhr, status, error); | |
431 | var msg; |
|
432 | var msg; | |
432 | if (xhr.responseJSON && xhr.responseJSON.message) { |
|
433 | if (xhr.responseJSON && xhr.responseJSON.message) { | |
433 | msg = xhr.responseJSON.message; |
|
434 | msg = xhr.responseJSON.message; | |
434 | } else { |
|
435 | } else { | |
435 | msg = xhr.statusText; |
|
436 | msg = xhr.statusText; | |
436 | } |
|
437 | } | |
437 | dialog.modal({ |
|
438 | dialog.modal({ | |
438 | title : 'Creating Notebook Failed', |
|
439 | title : 'Creating Notebook Failed', | |
439 | body : "The error was: " + msg, |
|
440 | body : "The error was: " + msg, | |
440 | buttons : {'OK' : {'class' : 'btn-primary'}} |
|
441 | buttons : {'OK' : {'class' : 'btn-primary'}} | |
441 | }); |
|
442 | }); | |
442 | }; |
|
443 | }; | |
443 |
|
444 | |||
444 | // Backwards compatibility. |
|
445 | // Backwards compatibility. | |
445 | IPython.NotebookList = NotebookList; |
|
446 | IPython.NotebookList = NotebookList; | |
446 |
|
447 | |||
447 | return {'NotebookList': NotebookList}; |
|
448 | return {'NotebookList': NotebookList}; | |
448 | }); |
|
449 | }); |
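notebooklist.js drives the dashboard entirely through REST calls: GET api/contents/<path> returns a model whose content field is the directory listing, PUT api/contents/<path>/<name> with a {'content': ...} body uploads a notebook, DELETE api/contents/<path>/<name> removes one, and DELETE api/sessions/<id> shuts a kernel down. A rough Python sketch of the same requests, assuming a server at localhost:8888 with no password set and using the third-party requests package; the URL and port are placeholders, not part of the changeset:

    import json
    import requests

    BASE = 'http://localhost:8888'  # assumed server address, no auth configured

    def list_directory(path=''):
        # Mirrors NotebookList.load_list: GET api/contents/<path>
        r = requests.get('%s/api/contents/%s' % (BASE, path))
        r.raise_for_status()
        return r.json()['content']  # list of {'name': ..., 'type': ...} entries

    def upload_notebook(path, name, nb_dict):
        # Mirrors the Upload button: PUT api/contents/<path>/<name>
        url = '%s/api/contents/%s/%s' % (BASE, path, name)
        r = requests.put(url, data=json.dumps({'content': nb_dict}),
                         headers={'Content-Type': 'application/json'})
        r.raise_for_status()

    def delete_notebook(path, name):
        # Mirrors the Delete button: DELETE api/contents/<path>/<name>
        r = requests.delete('%s/api/contents/%s/%s' % (BASE, path, name))
        r.raise_for_status()

    for entry in list_directory():
        print(entry['type'], entry['name'])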
@@ -1,101 +1,85 b'' | |||||
1 | """Tornado handlers for the tree view. |
|
1 | """Tornado handlers for the tree view.""" | |
2 |
|
2 | |||
3 | Authors: |
|
3 | # Copyright (c) IPython Development Team. | |
|
4 | # Distributed under the terms of the Modified BSD License. | |||
4 |
|
5 | |||
5 | * Brian Granger |
|
|||
6 | """ |
|
|||
7 |
|
||||
8 | #----------------------------------------------------------------------------- |
|
|||
9 | # Copyright (C) 2011 The IPython Development Team |
|
|||
10 | # |
|
|||
11 | # Distributed under the terms of the BSD License. The full license is in |
|
|||
12 | # the file COPYING, distributed as part of this software. |
|
|||
13 | #----------------------------------------------------------------------------- |
|
|||
14 |
|
||||
15 | #----------------------------------------------------------------------------- |
|
|||
16 | # Imports |
|
|||
17 | #----------------------------------------------------------------------------- |
|
|||
18 | from tornado import web |
|
6 | from tornado import web | |
19 | from ..base.handlers import IPythonHandler, notebook_path_regex, path_regex |
|
7 | from ..base.handlers import IPythonHandler, notebook_path_regex, path_regex | |
20 | from ..utils import url_path_join, url_escape |
|
8 | from ..utils import url_path_join, url_escape | |
21 |
|
9 | |||
22 | #----------------------------------------------------------------------------- |
|
|||
23 | # Handlers |
|
|||
24 | #----------------------------------------------------------------------------- |
|
|||
25 |
|
||||
26 |
|
10 | |||
27 | class TreeHandler(IPythonHandler): |
|
11 | class TreeHandler(IPythonHandler): | |
28 | """Render the tree view, listing notebooks, clusters, etc.""" |
|
12 | """Render the tree view, listing notebooks, clusters, etc.""" | |
29 |
|
13 | |||
30 | def generate_breadcrumbs(self, path): |
|
14 | def generate_breadcrumbs(self, path): | |
31 | breadcrumbs = [(url_escape(url_path_join(self.base_url, 'tree')), '')] |
|
15 | breadcrumbs = [(url_escape(url_path_join(self.base_url, 'tree')), '')] | |
32 | comps = path.split('/') |
|
16 | comps = path.split('/') | |
33 | ncomps = len(comps) |
|
17 | ncomps = len(comps) | |
34 | for i in range(ncomps): |
|
18 | for i in range(ncomps): | |
35 | if comps[i]: |
|
19 | if comps[i]: | |
36 | link = url_escape(url_path_join(self.base_url, 'tree', *comps[0:i+1])) |
|
20 | link = url_escape(url_path_join(self.base_url, 'tree', *comps[0:i+1])) | |
37 | breadcrumbs.append((link, comps[i])) |
|
21 | breadcrumbs.append((link, comps[i])) | |
38 | return breadcrumbs |
|
22 | return breadcrumbs | |
39 |
|
23 | |||
40 | def generate_page_title(self, path): |
|
24 | def generate_page_title(self, path): | |
41 | comps = path.split('/') |
|
25 | comps = path.split('/') | |
42 | if len(comps) > 3: |
|
26 | if len(comps) > 3: | |
43 | for i in range(len(comps)-2): |
|
27 | for i in range(len(comps)-2): | |
44 | comps.pop(0) |
|
28 | comps.pop(0) | |
45 | page_title = url_path_join(*comps) |
|
29 | page_title = url_path_join(*comps) | |
46 | if page_title: |
|
30 | if page_title: | |
47 | return page_title+'/' |
|
31 | return page_title+'/' | |
48 | else: |
|
32 | else: | |
49 | return 'Home' |
|
33 | return 'Home' | |
50 |
|
34 | |||
51 | @web.authenticated |
|
35 | @web.authenticated | |
52 | def get(self, path='', name=None): |
|
36 | def get(self, path='', name=None): | |
53 | path = path.strip('/') |
|
37 | path = path.strip('/') | |
54 | cm = self.contents_manager |
|
38 | cm = self.contents_manager | |
55 | if name is not None: |
|
39 | if name is not None: | |
56 | # is a notebook, redirect to notebook handler |
|
40 | # is a notebook, redirect to notebook handler | |
57 | url = url_escape(url_path_join( |
|
41 | url = url_escape(url_path_join( | |
58 | self.base_url, 'notebooks', path, name |
|
42 | self.base_url, 'notebooks', path, name | |
59 | )) |
|
43 | )) | |
60 | self.log.debug("Redirecting %s to %s", self.request.path, url) |
|
44 | self.log.debug("Redirecting %s to %s", self.request.path, url) | |
61 | self.redirect(url) |
|
45 | self.redirect(url) | |
62 | else: |
|
46 | else: | |
63 | if not cm.path_exists(path=path): |
|
47 | if not cm.path_exists(path=path): | |
64 | # Directory is hidden or does not exist. |
|
48 | # Directory is hidden or does not exist. | |
65 | raise web.HTTPError(404) |
|
49 | raise web.HTTPError(404) | |
66 | elif cm.is_hidden(path): |
|
50 | elif cm.is_hidden(path): | |
67 | self.log.info("Refusing to serve hidden directory, via 404 Error") |
|
51 | self.log.info("Refusing to serve hidden directory, via 404 Error") | |
68 | raise web.HTTPError(404) |
|
52 | raise web.HTTPError(404) | |
69 | breadcrumbs = self.generate_breadcrumbs(path) |
|
53 | breadcrumbs = self.generate_breadcrumbs(path) | |
70 | page_title = self.generate_page_title(path) |
|
54 | page_title = self.generate_page_title(path) | |
71 | self.write(self.render_template('tree.html', |
|
55 | self.write(self.render_template('tree.html', | |
72 | project=self.project_dir, |
|
56 | project=self.project_dir, | |
73 | page_title=page_title, |
|
57 | page_title=page_title, | |
74 | notebook_path=path, |
|
58 | notebook_path=path, | |
75 | breadcrumbs=breadcrumbs |
|
59 | breadcrumbs=breadcrumbs | |
76 | )) |
|
60 | )) | |
77 |
|
61 | |||
78 |
|
62 | |||
79 | class TreeRedirectHandler(IPythonHandler): |
|
63 | class TreeRedirectHandler(IPythonHandler): | |
80 | """Redirect a request to the corresponding tree URL""" |
|
64 | """Redirect a request to the corresponding tree URL""" | |
81 |
|
65 | |||
82 | @web.authenticated |
|
66 | @web.authenticated | |
83 | def get(self, path=''): |
|
67 | def get(self, path=''): | |
84 | url = url_escape(url_path_join( |
|
68 | url = url_escape(url_path_join( | |
85 | self.base_url, 'tree', path.strip('/') |
|
69 | self.base_url, 'tree', path.strip('/') | |
86 | )) |
|
70 | )) | |
87 | self.log.debug("Redirecting %s to %s", self.request.path, url) |
|
71 | self.log.debug("Redirecting %s to %s", self.request.path, url) | |
88 | self.redirect(url) |
|
72 | self.redirect(url) | |
89 |
|
73 | |||
90 |
|
74 | |||
91 | #----------------------------------------------------------------------------- |
|
75 | #----------------------------------------------------------------------------- | |
92 | # URL to handler mappings |
|
76 | # URL to handler mappings | |
93 | #----------------------------------------------------------------------------- |
|
77 | #----------------------------------------------------------------------------- | |
94 |
|
78 | |||
95 |
|
79 | |||
96 | default_handlers = [ |
|
80 | default_handlers = [ | |
97 | (r"/tree%s" % notebook_path_regex, TreeHandler), |
|
81 | (r"/tree%s" % notebook_path_regex, TreeHandler), | |
98 | (r"/tree%s" % path_regex, TreeHandler), |
|
82 | (r"/tree%s" % path_regex, TreeHandler), | |
99 | (r"/tree", TreeHandler), |
|
83 | (r"/tree", TreeHandler), | |
100 | (r"", TreeRedirectHandler), |
|
84 | (r"", TreeRedirectHandler), | |
101 | ] |
|
85 | ] |
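TreeHandler.generate_breadcrumbs produces one (link, label) pair per path component, anchored at base_url/tree, and tree.html renders those pairs as the directory trail. A standalone approximation of that loop, with posixpath.join standing in for url_path_join and URL escaping omitted, so the exact strings are only indicative:

    import posixpath

    def breadcrumbs(base_url, path):
        # First crumb is the tree root; its label is left empty, as in the handler.
        crumbs = [(posixpath.join(base_url, 'tree'), '')]
        comps = path.split('/')
        for i, comp in enumerate(comps):
            if comp:
                crumbs.append((posixpath.join(base_url, 'tree', *comps[:i + 1]), comp))
        return crumbs

    print(breadcrumbs('/', 'reports/2014/q3'))
    # [('/tree', ''), ('/tree/reports', 'reports'),
    #  ('/tree/reports/2014', '2014'), ('/tree/reports/2014/q3', 'q3')]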