Show More
@@ -1,772 +1,819 b'' | |||||
1 | """A contents manager that uses the local file system for storage.""" |
|
1 | """A contents manager that uses the local file system for storage.""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | import base64 |
|
6 | import base64 | |
7 | from contextlib import contextmanager |
|
7 | from contextlib import contextmanager | |
8 | import errno |
|
8 | import errno | |
9 | import io |
|
9 | import io | |
10 | import os |
|
10 | import os | |
11 | import shutil |
|
11 | import shutil | |
12 | import mimetypes |
|
12 | import mimetypes | |
13 |
|
13 | |||
14 | from tornado import web |
|
14 | from tornado import web | |
15 |
|
15 | |||
16 | from .manager import ( |
|
16 | from .manager import ( | |
17 | CheckpointManager, |
|
17 | CheckpointManager, | |
18 | ContentsManager, |
|
18 | ContentsManager, | |
19 | ) |
|
19 | ) | |
20 | from IPython import nbformat |
|
20 | from IPython import nbformat | |
21 | from IPython.utils.io import atomic_writing |
|
21 | from IPython.utils.io import atomic_writing | |
22 | from IPython.utils.importstring import import_item |
|
22 | from IPython.utils.importstring import import_item | |
23 | from IPython.utils.path import ensure_dir_exists |
|
23 | from IPython.utils.path import ensure_dir_exists | |
24 | from IPython.utils.traitlets import Any, Unicode, Bool, TraitError |
|
24 | from IPython.utils.traitlets import Any, Unicode, Bool, TraitError | |
25 | from IPython.utils.py3compat import getcwd, string_types, str_to_unicode |
|
25 | from IPython.utils.py3compat import getcwd, string_types, str_to_unicode | |
26 | from IPython.utils import tz |
|
26 | from IPython.utils import tz | |
27 | from IPython.html.utils import ( |
|
27 | from IPython.html.utils import ( | |
28 | is_hidden, |
|
28 | is_hidden, | |
29 | to_api_path, |
|
29 | to_api_path, | |
30 | to_os_path, |
|
30 | to_os_path, | |
31 | ) |
|
31 | ) | |
32 |
|
32 | |||
33 | _script_exporter = None |
|
33 | _script_exporter = None | |
34 |
|
34 | |||
35 | def _post_save_script(model, os_path, contents_manager, **kwargs): |
|
35 | def _post_save_script(model, os_path, contents_manager, **kwargs): | |
36 | """convert notebooks to Python script after save with nbconvert |
|
36 | """convert notebooks to Python script after save with nbconvert | |
37 |
|
37 | |||
38 | replaces `ipython notebook --script` |
|
38 | replaces `ipython notebook --script` | |
39 | """ |
|
39 | """ | |
40 | from IPython.nbconvert.exporters.script import ScriptExporter |
|
40 | from IPython.nbconvert.exporters.script import ScriptExporter | |
41 |
|
41 | |||
42 | if model['type'] != 'notebook': |
|
42 | if model['type'] != 'notebook': | |
43 | return |
|
43 | return | |
44 |
|
44 | |||
45 | global _script_exporter |
|
45 | global _script_exporter | |
46 | if _script_exporter is None: |
|
46 | if _script_exporter is None: | |
47 | _script_exporter = ScriptExporter(parent=contents_manager) |
|
47 | _script_exporter = ScriptExporter(parent=contents_manager) | |
48 | log = contents_manager.log |
|
48 | log = contents_manager.log | |
49 |
|
49 | |||
50 | base, ext = os.path.splitext(os_path) |
|
50 | base, ext = os.path.splitext(os_path) | |
51 | py_fname = base + '.py' |
|
51 | py_fname = base + '.py' | |
52 | script, resources = _script_exporter.from_filename(os_path) |
|
52 | script, resources = _script_exporter.from_filename(os_path) | |
53 | script_fname = base + resources.get('output_extension', '.txt') |
|
53 | script_fname = base + resources.get('output_extension', '.txt') | |
54 | log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir)) |
|
54 | log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir)) | |
55 | with io.open(script_fname, 'w', encoding='utf-8') as f: |
|
55 | with io.open(script_fname, 'w', encoding='utf-8') as f: | |
56 | f.write(script) |
|
56 | f.write(script) | |
57 |
|
57 | |||
58 |
|
58 | |||
59 | class FileManagerMixin(object): |
|
59 | class FileManagerMixin(object): | |
60 | """ |
|
60 | """ | |
61 | Mixin for ContentsAPI classes that interact with the filesystem. |
|
61 | Mixin for ContentsAPI classes that interact with the filesystem. | |
62 |
|
62 | |||
63 | Provides facilities for reading, writing, and copying both notebooks and |
|
63 | Provides facilities for reading, writing, and copying both notebooks and | |
64 | generic files. |
|
64 | generic files. | |
65 |
|
65 | |||
66 | Shared by FileContentsManager and FileCheckpointManager. |
|
66 | Shared by FileContentsManager and FileCheckpointManager. | |
67 |
|
67 | |||
68 | Note |
|
68 | Note | |
69 | ---- |
|
69 | ---- | |
70 | Classes using this mixin must provide the following attributes: |
|
70 | Classes using this mixin must provide the following attributes: | |
71 |
|
71 | |||
72 | root_dir : unicode |
|
72 | root_dir : unicode | |
73 | A directory against against which API-style paths are to be resolved. |
|
73 | A directory against against which API-style paths are to be resolved. | |
74 |
|
74 | |||
75 | log : logging.Logger |
|
75 | log : logging.Logger | |
76 | """ |
|
76 | """ | |
77 |
|
77 | |||
78 | @contextmanager |
|
78 | @contextmanager | |
79 | def open(self, os_path, *args, **kwargs): |
|
79 | def open(self, os_path, *args, **kwargs): | |
80 | """wrapper around io.open that turns permission errors into 403""" |
|
80 | """wrapper around io.open that turns permission errors into 403""" | |
81 | with self.perm_to_403(os_path): |
|
81 | with self.perm_to_403(os_path): | |
82 | with io.open(os_path, *args, **kwargs) as f: |
|
82 | with io.open(os_path, *args, **kwargs) as f: | |
83 | yield f |
|
83 | yield f | |
84 |
|
84 | |||
85 | @contextmanager |
|
85 | @contextmanager | |
86 | def atomic_writing(self, os_path, *args, **kwargs): |
|
86 | def atomic_writing(self, os_path, *args, **kwargs): | |
87 | """wrapper around atomic_writing that turns permission errors into 403""" |
|
87 | """wrapper around atomic_writing that turns permission errors into 403""" | |
88 | with self.perm_to_403(os_path): |
|
88 | with self.perm_to_403(os_path): | |
89 | with atomic_writing(os_path, *args, **kwargs) as f: |
|
89 | with atomic_writing(os_path, *args, **kwargs) as f: | |
90 | yield f |
|
90 | yield f | |
91 |
|
91 | |||
92 | @contextmanager |
|
92 | @contextmanager | |
93 | def perm_to_403(self, os_path=''): |
|
93 | def perm_to_403(self, os_path=''): | |
94 | """context manager for turning permission errors into 403.""" |
|
94 | """context manager for turning permission errors into 403.""" | |
95 | try: |
|
95 | try: | |
96 | yield |
|
96 | yield | |
97 | except OSError as e: |
|
97 | except OSError as e: | |
98 | if e.errno in {errno.EPERM, errno.EACCES}: |
|
98 | if e.errno in {errno.EPERM, errno.EACCES}: | |
99 | # make 403 error message without root prefix |
|
99 | # make 403 error message without root prefix | |
100 | # this may not work perfectly on unicode paths on Python 2, |
|
100 | # this may not work perfectly on unicode paths on Python 2, | |
101 | # but nobody should be doing that anyway. |
|
101 | # but nobody should be doing that anyway. | |
102 | if not os_path: |
|
102 | if not os_path: | |
103 | os_path = str_to_unicode(e.filename or 'unknown file') |
|
103 | os_path = str_to_unicode(e.filename or 'unknown file') | |
104 | path = to_api_path(os_path, root=self.root_dir) |
|
104 | path = to_api_path(os_path, root=self.root_dir) | |
105 | raise web.HTTPError(403, u'Permission denied: %s' % path) |
|
105 | raise web.HTTPError(403, u'Permission denied: %s' % path) | |
106 | else: |
|
106 | else: | |
107 | raise |
|
107 | raise | |
108 |
|
108 | |||
109 | def _copy(self, src, dest): |
|
109 | def _copy(self, src, dest): | |
110 | """copy src to dest |
|
110 | """copy src to dest | |
111 |
|
111 | |||
112 | like shutil.copy2, but log errors in copystat |
|
112 | like shutil.copy2, but log errors in copystat | |
113 | """ |
|
113 | """ | |
114 | shutil.copyfile(src, dest) |
|
114 | shutil.copyfile(src, dest) | |
115 | try: |
|
115 | try: | |
116 | shutil.copystat(src, dest) |
|
116 | shutil.copystat(src, dest) | |
117 | except OSError: |
|
117 | except OSError: | |
118 | self.log.debug("copystat on %s failed", dest, exc_info=True) |
|
118 | self.log.debug("copystat on %s failed", dest, exc_info=True) | |
119 |
|
119 | |||
120 | def _get_os_path(self, path): |
|
120 | def _get_os_path(self, path): | |
121 | """Given an API path, return its file system path. |
|
121 | """Given an API path, return its file system path. | |
122 |
|
122 | |||
123 | Parameters |
|
123 | Parameters | |
124 | ---------- |
|
124 | ---------- | |
125 | path : string |
|
125 | path : string | |
126 | The relative API path to the named file. |
|
126 | The relative API path to the named file. | |
127 |
|
127 | |||
128 | Returns |
|
128 | Returns | |
129 | ------- |
|
129 | ------- | |
130 | path : string |
|
130 | path : string | |
131 | Native, absolute OS path to for a file. |
|
131 | Native, absolute OS path to for a file. | |
132 | """ |
|
132 | """ | |
133 | return to_os_path(path, self.root_dir) |
|
133 | return to_os_path(path, self.root_dir) | |
134 |
|
134 | |||
135 | def _read_notebook(self, os_path, as_version=4): |
|
135 | def _read_notebook(self, os_path, as_version=4): | |
136 | """Read a notebook from an os path.""" |
|
136 | """Read a notebook from an os path.""" | |
137 | with self.open(os_path, 'r', encoding='utf-8') as f: |
|
137 | with self.open(os_path, 'r', encoding='utf-8') as f: | |
138 | try: |
|
138 | try: | |
139 | return nbformat.read(f, as_version=as_version) |
|
139 | return nbformat.read(f, as_version=as_version) | |
140 | except Exception as e: |
|
140 | except Exception as e: | |
141 | raise web.HTTPError( |
|
141 | raise web.HTTPError( | |
142 | 400, |
|
142 | 400, | |
143 | u"Unreadable Notebook: %s %r" % (os_path, e), |
|
143 | u"Unreadable Notebook: %s %r" % (os_path, e), | |
144 | ) |
|
144 | ) | |
145 |
|
145 | |||
146 | def _save_notebook(self, os_path, nb): |
|
146 | def _save_notebook(self, os_path, nb): | |
147 | """Save a notebook to an os_path.""" |
|
147 | """Save a notebook to an os_path.""" | |
148 | with self.atomic_writing(os_path, encoding='utf-8') as f: |
|
148 | with self.atomic_writing(os_path, encoding='utf-8') as f: | |
149 | nbformat.write(nb, f, version=nbformat.NO_CONVERT) |
|
149 | nbformat.write(nb, f, version=nbformat.NO_CONVERT) | |
150 |
|
150 | |||
151 | def _read_file(self, os_path, format): |
|
151 | def _read_file(self, os_path, format): | |
152 | """Read a non-notebook file. |
|
152 | """Read a non-notebook file. | |
153 |
|
153 | |||
154 | os_path: The path to be read. |
|
154 | os_path: The path to be read. | |
155 | format: |
|
155 | format: | |
156 | If 'text', the contents will be decoded as UTF-8. |
|
156 | If 'text', the contents will be decoded as UTF-8. | |
157 | If 'base64', the raw bytes contents will be encoded as base64. |
|
157 | If 'base64', the raw bytes contents will be encoded as base64. | |
158 | If not specified, try to decode as UTF-8, and fall back to base64 |
|
158 | If not specified, try to decode as UTF-8, and fall back to base64 | |
159 | """ |
|
159 | """ | |
160 | if not os.path.isfile(os_path): |
|
160 | if not os.path.isfile(os_path): | |
161 | raise web.HTTPError(400, "Cannot read non-file %s" % os_path) |
|
161 | raise web.HTTPError(400, "Cannot read non-file %s" % os_path) | |
162 |
|
162 | |||
163 | with self.open(os_path, 'rb') as f: |
|
163 | with self.open(os_path, 'rb') as f: | |
164 | bcontent = f.read() |
|
164 | bcontent = f.read() | |
165 |
|
165 | |||
166 | if format is None or format == 'text': |
|
166 | if format is None or format == 'text': | |
167 | # Try to interpret as unicode if format is unknown or if unicode |
|
167 | # Try to interpret as unicode if format is unknown or if unicode | |
168 | # was explicitly requested. |
|
168 | # was explicitly requested. | |
169 | try: |
|
169 | try: | |
170 | return bcontent.decode('utf8'), 'text' |
|
170 | return bcontent.decode('utf8'), 'text' | |
171 | except UnicodeError as e: |
|
171 | except UnicodeError as e: | |
172 | if format == 'text': |
|
172 | if format == 'text': | |
173 | raise web.HTTPError( |
|
173 | raise web.HTTPError( | |
174 | 400, |
|
174 | 400, | |
175 | "%s is not UTF-8 encoded" % os_path, |
|
175 | "%s is not UTF-8 encoded" % os_path, | |
176 | reason='bad format', |
|
176 | reason='bad format', | |
177 | ) |
|
177 | ) | |
178 | return base64.encodestring(bcontent).decode('ascii'), 'base64' |
|
178 | return base64.encodestring(bcontent).decode('ascii'), 'base64' | |
179 |
|
179 | |||
180 | def _save_file(self, os_path, content, format): |
|
180 | def _save_file(self, os_path, content, format): | |
181 | """Save content of a generic file.""" |
|
181 | """Save content of a generic file.""" | |
182 | if format not in {'text', 'base64'}: |
|
182 | if format not in {'text', 'base64'}: | |
183 | raise web.HTTPError( |
|
183 | raise web.HTTPError( | |
184 | 400, |
|
184 | 400, | |
185 | "Must specify format of file contents as 'text' or 'base64'", |
|
185 | "Must specify format of file contents as 'text' or 'base64'", | |
186 | ) |
|
186 | ) | |
187 | try: |
|
187 | try: | |
188 | if format == 'text': |
|
188 | if format == 'text': | |
189 | bcontent = content.encode('utf8') |
|
189 | bcontent = content.encode('utf8') | |
190 | else: |
|
190 | else: | |
191 | b64_bytes = content.encode('ascii') |
|
191 | b64_bytes = content.encode('ascii') | |
192 | bcontent = base64.decodestring(b64_bytes) |
|
192 | bcontent = base64.decodestring(b64_bytes) | |
193 | except Exception as e: |
|
193 | except Exception as e: | |
194 | raise web.HTTPError(400, u'Encoding error saving %s: %s' % (os_path, e)) |
|
194 | raise web.HTTPError(400, u'Encoding error saving %s: %s' % (os_path, e)) | |
195 |
|
195 | |||
196 | with self.atomic_writing(os_path, text=False) as f: |
|
196 | with self.atomic_writing(os_path, text=False) as f: | |
197 | f.write(bcontent) |
|
197 | f.write(bcontent) | |
198 |
|
198 | |||
199 |
|
199 | |||
200 | class FileCheckpointManager(FileManagerMixin, CheckpointManager): |
|
200 | class FileCheckpointManager(FileManagerMixin, CheckpointManager): | |
201 | """ |
|
201 | """ | |
202 | A CheckpointManager that caches checkpoints for files in adjacent |
|
202 | A CheckpointManager that caches checkpoints for files in adjacent | |
203 | directories. |
|
203 | directories. | |
204 | """ |
|
204 | """ | |
205 |
|
205 | |||
206 | checkpoint_dir = Unicode( |
|
206 | checkpoint_dir = Unicode( | |
207 | '.ipynb_checkpoints', |
|
207 | '.ipynb_checkpoints', | |
208 | config=True, |
|
208 | config=True, | |
209 | help="""The directory name in which to keep file checkpoints |
|
209 | help="""The directory name in which to keep file checkpoints | |
210 |
|
210 | |||
211 | This is a path relative to the file's own directory. |
|
211 | This is a path relative to the file's own directory. | |
212 |
|
212 | |||
213 | By default, it is .ipynb_checkpoints |
|
213 | By default, it is .ipynb_checkpoints | |
214 | """, |
|
214 | """, | |
215 | ) |
|
215 | ) | |
216 |
|
216 | |||
217 | root_dir = Unicode(config=True) |
|
217 | root_dir = Unicode(config=True) | |
218 |
|
218 | |||
219 | def _root_dir_default(self): |
|
219 | def _root_dir_default(self): | |
220 | try: |
|
220 | try: | |
221 | return self.parent.root_dir |
|
221 | return self.parent.root_dir | |
222 | except AttributeError: |
|
222 | except AttributeError: | |
223 | return getcwd() |
|
223 | return getcwd() | |
224 |
|
224 | |||
225 |
# |
|
225 | # ContentsManager-dependent checkpoint API | |
226 |
def create_ |
|
226 | def create_checkpoint(self, contents_mgr, path): | |
227 | """Create a checkpoint from the current content of a notebook.""" |
|
227 | """ | |
228 | path = path.strip('/') |
|
228 | Create a checkpoint. | |
229 | # only the one checkpoint ID: |
|
|||
230 | checkpoint_id = u"checkpoint" |
|
|||
231 | os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) |
|
|||
232 | self.log.debug("creating checkpoint for %s", path) |
|
|||
233 | with self.perm_to_403(): |
|
|||
234 | self._save_file(os_checkpoint_path, content, format=format) |
|
|||
235 |
|
||||
236 | # return the checkpoint info |
|
|||
237 | return self.checkpoint_model(checkpoint_id, os_checkpoint_path) |
|
|||
238 |
|
||||
239 | def create_notebook_checkpoint(self, nb, path): |
|
|||
240 | """Create a checkpoint from the current content of a notebook.""" |
|
|||
241 | path = path.strip('/') |
|
|||
242 | # only the one checkpoint ID: |
|
|||
243 | checkpoint_id = u"checkpoint" |
|
|||
244 | os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) |
|
|||
245 | self.log.debug("creating checkpoint for %s", path) |
|
|||
246 | with self.perm_to_403(): |
|
|||
247 | self._save_notebook(os_checkpoint_path, nb) |
|
|||
248 |
|
|
229 | ||
249 | # return the checkpoint info |
|
230 | If contents_mgr is backed by the local filesystem, just copy the | |
250 | return self.checkpoint_model(checkpoint_id, os_checkpoint_path) |
|
231 | appropriate file to the checkpoint directory. Otherwise, ask the | |
|
232 | ContentsManager for a model and write it ourselves. | |||
|
233 | """ | |||
|
234 | if contents_mgr.backend == 'local_file': | |||
|
235 | # We know that the file is in the local filesystem, so just copy | |||
|
236 | # from the base location to our location. | |||
|
237 | checkpoint_id = u'checkpoint' | |||
|
238 | src_path = contents_mgr._get_os_path(path) | |||
|
239 | dest_path = self.checkpoint_path(checkpoint_id, path) | |||
|
240 | self._copy(src_path, dest_path) | |||
|
241 | return self.checkpoint_model(checkpoint_id, dest_path) | |||
|
242 | else: | |||
|
243 | return super(FileCheckpointManager, self).create_checkpoint( | |||
|
244 | contents_mgr, path, | |||
|
245 | ) | |||
251 |
|
246 | |||
252 |
def |
|
247 | def restore_checkpoint(self, contents_mgr, checkpoint_id, path): | |
253 | """Get the content of a checkpoint. |
|
248 | """ | |
|
249 | Restore a checkpoint. | |||
254 |
|
250 | |||
255 | Returns a model suitable for passing to ContentsManager.save. |
|
251 | If contents_mgr is backed by the local filesystem, just copy the | |
|
252 | appropriate file from the checkpoint directory. Otherwise, load the | |||
|
253 | model and pass it to ContentsManager.save. | |||
256 | """ |
|
254 | """ | |
257 | path = path.strip('/') |
|
255 | if contents_mgr.backend == 'local_file': | |
258 | self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) |
|
256 | # We know that the file is in the local filesystem, so just copy | |
259 | os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) |
|
257 | # from our base location to the location expected by content | |
260 | if not os.path.isfile(os_checkpoint_path): |
|
258 | src_path = self.checkpoint_path(checkpoint_id, path) | |
261 | self.no_such_checkpoint(path, checkpoint_id) |
|
259 | dest_path = contents_mgr._get_os_path(path) | |
262 | if type == 'notebook': |
|
260 | self._copy(src_path, dest_path) | |
263 | return { |
|
|||
264 | 'type': type, |
|
|||
265 | 'content': self._read_notebook( |
|
|||
266 | os_checkpoint_path, |
|
|||
267 | as_version=4, |
|
|||
268 | ), |
|
|||
269 | } |
|
|||
270 | else: |
|
261 | else: | |
271 | content, format = self._read_file(os_checkpoint_path, format=None) |
|
262 | super(FileCheckpointManager, self).restore_checkpoint( | |
272 | return { |
|
263 | contents_mgr, checkpoint_id, path | |
273 | 'type': type, |
|
264 | ) | |
274 | 'content': content, |
|
|||
275 | 'format': format, |
|
|||
276 | } |
|
|||
277 |
|
265 | |||
|
266 | # ContentsManager-independent checkpoint API | |||
278 | def rename_checkpoint(self, checkpoint_id, old_path, new_path): |
|
267 | def rename_checkpoint(self, checkpoint_id, old_path, new_path): | |
279 | """Rename a checkpoint from old_path to new_path.""" |
|
268 | """Rename a checkpoint from old_path to new_path.""" | |
280 | old_cp_path = self.checkpoint_path(checkpoint_id, old_path) |
|
269 | old_cp_path = self.checkpoint_path(checkpoint_id, old_path) | |
281 | new_cp_path = self.checkpoint_path(checkpoint_id, new_path) |
|
270 | new_cp_path = self.checkpoint_path(checkpoint_id, new_path) | |
282 | if os.path.isfile(old_cp_path): |
|
271 | if os.path.isfile(old_cp_path): | |
283 | self.log.debug( |
|
272 | self.log.debug( | |
284 | "Renaming checkpoint %s -> %s", |
|
273 | "Renaming checkpoint %s -> %s", | |
285 | old_cp_path, |
|
274 | old_cp_path, | |
286 | new_cp_path, |
|
275 | new_cp_path, | |
287 | ) |
|
276 | ) | |
288 | with self.perm_to_403(): |
|
277 | with self.perm_to_403(): | |
289 | shutil.move(old_cp_path, new_cp_path) |
|
278 | shutil.move(old_cp_path, new_cp_path) | |
290 |
|
279 | |||
291 | def delete_checkpoint(self, checkpoint_id, path): |
|
280 | def delete_checkpoint(self, checkpoint_id, path): | |
292 | """delete a file's checkpoint""" |
|
281 | """delete a file's checkpoint""" | |
293 | path = path.strip('/') |
|
282 | path = path.strip('/') | |
294 | cp_path = self.checkpoint_path(checkpoint_id, path) |
|
283 | cp_path = self.checkpoint_path(checkpoint_id, path) | |
295 | if not os.path.isfile(cp_path): |
|
284 | if not os.path.isfile(cp_path): | |
296 | self.no_such_checkpoint(path, checkpoint_id) |
|
285 | self.no_such_checkpoint(path, checkpoint_id) | |
297 |
|
286 | |||
298 | self.log.debug("unlinking %s", cp_path) |
|
287 | self.log.debug("unlinking %s", cp_path) | |
299 | with self.perm_to_403(): |
|
288 | with self.perm_to_403(): | |
300 | os.unlink(cp_path) |
|
289 | os.unlink(cp_path) | |
301 |
|
290 | |||
302 | def list_checkpoints(self, path): |
|
291 | def list_checkpoints(self, path): | |
303 | """list the checkpoints for a given file |
|
292 | """list the checkpoints for a given file | |
304 |
|
293 | |||
305 | This contents manager currently only supports one checkpoint per file. |
|
294 | This contents manager currently only supports one checkpoint per file. | |
306 | """ |
|
295 | """ | |
307 | path = path.strip('/') |
|
296 | path = path.strip('/') | |
308 | checkpoint_id = "checkpoint" |
|
297 | checkpoint_id = "checkpoint" | |
309 | os_path = self.checkpoint_path(checkpoint_id, path) |
|
298 | os_path = self.checkpoint_path(checkpoint_id, path) | |
310 | if not os.path.isfile(os_path): |
|
299 | if not os.path.isfile(os_path): | |
311 | return [] |
|
300 | return [] | |
312 | else: |
|
301 | else: | |
313 | return [self.checkpoint_model(checkpoint_id, os_path)] |
|
302 | return [self.checkpoint_model(checkpoint_id, os_path)] | |
314 |
|
303 | |||
315 | # Checkpoint-related utilities |
|
304 | # Checkpoint-related utilities | |
316 | def checkpoint_path(self, checkpoint_id, path): |
|
305 | def checkpoint_path(self, checkpoint_id, path): | |
317 | """find the path to a checkpoint""" |
|
306 | """find the path to a checkpoint""" | |
318 | path = path.strip('/') |
|
307 | path = path.strip('/') | |
319 | parent, name = ('/' + path).rsplit('/', 1) |
|
308 | parent, name = ('/' + path).rsplit('/', 1) | |
320 | parent = parent.strip('/') |
|
309 | parent = parent.strip('/') | |
321 | basename, ext = os.path.splitext(name) |
|
310 | basename, ext = os.path.splitext(name) | |
322 | filename = u"{name}-{checkpoint_id}{ext}".format( |
|
311 | filename = u"{name}-{checkpoint_id}{ext}".format( | |
323 | name=basename, |
|
312 | name=basename, | |
324 | checkpoint_id=checkpoint_id, |
|
313 | checkpoint_id=checkpoint_id, | |
325 | ext=ext, |
|
314 | ext=ext, | |
326 | ) |
|
315 | ) | |
327 | os_path = self._get_os_path(path=parent) |
|
316 | os_path = self._get_os_path(path=parent) | |
328 | cp_dir = os.path.join(os_path, self.checkpoint_dir) |
|
317 | cp_dir = os.path.join(os_path, self.checkpoint_dir) | |
329 | with self.perm_to_403(): |
|
318 | with self.perm_to_403(): | |
330 | ensure_dir_exists(cp_dir) |
|
319 | ensure_dir_exists(cp_dir) | |
331 | cp_path = os.path.join(cp_dir, filename) |
|
320 | cp_path = os.path.join(cp_dir, filename) | |
332 | return cp_path |
|
321 | return cp_path | |
333 |
|
322 | |||
334 | def checkpoint_model(self, checkpoint_id, os_path): |
|
323 | def checkpoint_model(self, checkpoint_id, os_path): | |
335 | """construct the info dict for a given checkpoint""" |
|
324 | """construct the info dict for a given checkpoint""" | |
336 | stats = os.stat(os_path) |
|
325 | stats = os.stat(os_path) | |
337 | last_modified = tz.utcfromtimestamp(stats.st_mtime) |
|
326 | last_modified = tz.utcfromtimestamp(stats.st_mtime) | |
338 | info = dict( |
|
327 | info = dict( | |
339 | id=checkpoint_id, |
|
328 | id=checkpoint_id, | |
340 | last_modified=last_modified, |
|
329 | last_modified=last_modified, | |
341 | ) |
|
330 | ) | |
342 | return info |
|
331 | return info | |
343 |
|
332 | |||
|
333 | def create_file_checkpoint(self, content, format, path): | |||
|
334 | """Create a checkpoint from the current content of a notebook.""" | |||
|
335 | path = path.strip('/') | |||
|
336 | # only the one checkpoint ID: | |||
|
337 | checkpoint_id = u"checkpoint" | |||
|
338 | os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) | |||
|
339 | self.log.debug("creating checkpoint for %s", path) | |||
|
340 | with self.perm_to_403(): | |||
|
341 | self._save_file(os_checkpoint_path, content, format=format) | |||
|
342 | ||||
|
343 | # return the checkpoint info | |||
|
344 | return self.checkpoint_model(checkpoint_id, os_checkpoint_path) | |||
|
345 | ||||
|
346 | def create_notebook_checkpoint(self, nb, path): | |||
|
347 | """Create a checkpoint from the current content of a notebook.""" | |||
|
348 | path = path.strip('/') | |||
|
349 | # only the one checkpoint ID: | |||
|
350 | checkpoint_id = u"checkpoint" | |||
|
351 | os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) | |||
|
352 | self.log.debug("creating checkpoint for %s", path) | |||
|
353 | with self.perm_to_403(): | |||
|
354 | self._save_notebook(os_checkpoint_path, nb) | |||
|
355 | ||||
|
356 | # return the checkpoint info | |||
|
357 | return self.checkpoint_model(checkpoint_id, os_checkpoint_path) | |||
|
358 | ||||
|
359 | def get_checkpoint(self, checkpoint_id, path, type): | |||
|
360 | """Get the content of a checkpoint. | |||
|
361 | ||||
|
362 | Returns a model suitable for passing to ContentsManager.save. | |||
|
363 | """ | |||
|
364 | path = path.strip('/') | |||
|
365 | self.log.info("restoring %s from checkpoint %s", path, checkpoint_id) | |||
|
366 | os_checkpoint_path = self.checkpoint_path(checkpoint_id, path) | |||
|
367 | if not os.path.isfile(os_checkpoint_path): | |||
|
368 | self.no_such_checkpoint(path, checkpoint_id) | |||
|
369 | ||||
|
370 | if type == 'notebook': | |||
|
371 | return { | |||
|
372 | 'type': type, | |||
|
373 | 'content': self._read_notebook( | |||
|
374 | os_checkpoint_path, | |||
|
375 | as_version=4, | |||
|
376 | ), | |||
|
377 | } | |||
|
378 | elif type == 'file': | |||
|
379 | content, format = self._read_file(os_checkpoint_path, format=None) | |||
|
380 | return { | |||
|
381 | 'type': type, | |||
|
382 | 'content': content, | |||
|
383 | 'format': format, | |||
|
384 | } | |||
|
385 | else: | |||
|
386 | raise web.HTTPError( | |||
|
387 | 500, | |||
|
388 | u'Unexpected type %s' % type | |||
|
389 | ) | |||
|
390 | ||||
344 | # Error Handling |
|
391 | # Error Handling | |
345 | def no_such_checkpoint(self, path, checkpoint_id): |
|
392 | def no_such_checkpoint(self, path, checkpoint_id): | |
346 | raise web.HTTPError( |
|
393 | raise web.HTTPError( | |
347 | 404, |
|
394 | 404, | |
348 | u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) |
|
395 | u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id) | |
349 | ) |
|
396 | ) | |
350 |
|
397 | |||
351 |
|
398 | |||
352 | class FileContentsManager(FileManagerMixin, ContentsManager): |
|
399 | class FileContentsManager(FileManagerMixin, ContentsManager): | |
353 |
|
400 | |||
354 | root_dir = Unicode(config=True) |
|
401 | root_dir = Unicode(config=True) | |
355 |
|
402 | |||
356 | def _root_dir_default(self): |
|
403 | def _root_dir_default(self): | |
357 | try: |
|
404 | try: | |
358 | return self.parent.notebook_dir |
|
405 | return self.parent.notebook_dir | |
359 | except AttributeError: |
|
406 | except AttributeError: | |
360 | return getcwd() |
|
407 | return getcwd() | |
361 |
|
408 | |||
362 | save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook') |
|
409 | save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook') | |
363 | def _save_script_changed(self): |
|
410 | def _save_script_changed(self): | |
364 | self.log.warn(""" |
|
411 | self.log.warn(""" | |
365 | `--script` is deprecated. You can trigger nbconvert via pre- or post-save hooks: |
|
412 | `--script` is deprecated. You can trigger nbconvert via pre- or post-save hooks: | |
366 |
|
413 | |||
367 | ContentsManager.pre_save_hook |
|
414 | ContentsManager.pre_save_hook | |
368 | FileContentsManager.post_save_hook |
|
415 | FileContentsManager.post_save_hook | |
369 |
|
416 | |||
370 | A post-save hook has been registered that calls: |
|
417 | A post-save hook has been registered that calls: | |
371 |
|
418 | |||
372 | ipython nbconvert --to script [notebook] |
|
419 | ipython nbconvert --to script [notebook] | |
373 |
|
420 | |||
374 | which behaves similarly to `--script`. |
|
421 | which behaves similarly to `--script`. | |
375 | """) |
|
422 | """) | |
376 |
|
423 | |||
377 | self.post_save_hook = _post_save_script |
|
424 | self.post_save_hook = _post_save_script | |
378 |
|
425 | |||
379 | post_save_hook = Any(None, config=True, |
|
426 | post_save_hook = Any(None, config=True, | |
380 | help="""Python callable or importstring thereof |
|
427 | help="""Python callable or importstring thereof | |
381 |
|
428 | |||
382 | to be called on the path of a file just saved. |
|
429 | to be called on the path of a file just saved. | |
383 |
|
430 | |||
384 | This can be used to process the file on disk, |
|
431 | This can be used to process the file on disk, | |
385 | such as converting the notebook to a script or HTML via nbconvert. |
|
432 | such as converting the notebook to a script or HTML via nbconvert. | |
386 |
|
433 | |||
387 | It will be called as (all arguments passed by keyword): |
|
434 | It will be called as (all arguments passed by keyword): | |
388 |
|
435 | |||
389 | hook(os_path=os_path, model=model, contents_manager=instance) |
|
436 | hook(os_path=os_path, model=model, contents_manager=instance) | |
390 |
|
437 | |||
391 | path: the filesystem path to the file just written |
|
438 | path: the filesystem path to the file just written | |
392 | model: the model representing the file |
|
439 | model: the model representing the file | |
393 | contents_manager: this ContentsManager instance |
|
440 | contents_manager: this ContentsManager instance | |
394 | """ |
|
441 | """ | |
395 | ) |
|
442 | ) | |
396 | def _post_save_hook_changed(self, name, old, new): |
|
443 | def _post_save_hook_changed(self, name, old, new): | |
397 | if new and isinstance(new, string_types): |
|
444 | if new and isinstance(new, string_types): | |
398 | self.post_save_hook = import_item(self.post_save_hook) |
|
445 | self.post_save_hook = import_item(self.post_save_hook) | |
399 | elif new: |
|
446 | elif new: | |
400 | if not callable(new): |
|
447 | if not callable(new): | |
401 | raise TraitError("post_save_hook must be callable") |
|
448 | raise TraitError("post_save_hook must be callable") | |
402 |
|
449 | |||
403 | def run_post_save_hook(self, model, os_path): |
|
450 | def run_post_save_hook(self, model, os_path): | |
404 | """Run the post-save hook if defined, and log errors""" |
|
451 | """Run the post-save hook if defined, and log errors""" | |
405 | if self.post_save_hook: |
|
452 | if self.post_save_hook: | |
406 | try: |
|
453 | try: | |
407 | self.log.debug("Running post-save hook on %s", os_path) |
|
454 | self.log.debug("Running post-save hook on %s", os_path) | |
408 | self.post_save_hook(os_path=os_path, model=model, contents_manager=self) |
|
455 | self.post_save_hook(os_path=os_path, model=model, contents_manager=self) | |
409 | except Exception: |
|
456 | except Exception: | |
410 | self.log.error("Post-save hook failed on %s", os_path, exc_info=True) |
|
457 | self.log.error("Post-save hook failed on %s", os_path, exc_info=True) | |
411 |
|
458 | |||
412 | def _root_dir_changed(self, name, old, new): |
|
459 | def _root_dir_changed(self, name, old, new): | |
413 | """Do a bit of validation of the root_dir.""" |
|
460 | """Do a bit of validation of the root_dir.""" | |
414 | if not os.path.isabs(new): |
|
461 | if not os.path.isabs(new): | |
415 | # If we receive a non-absolute path, make it absolute. |
|
462 | # If we receive a non-absolute path, make it absolute. | |
416 | self.root_dir = os.path.abspath(new) |
|
463 | self.root_dir = os.path.abspath(new) | |
417 | return |
|
464 | return | |
418 | if not os.path.isdir(new): |
|
465 | if not os.path.isdir(new): | |
419 | raise TraitError("%r is not a directory" % new) |
|
466 | raise TraitError("%r is not a directory" % new) | |
420 |
|
467 | |||
421 | def _checkpoint_manager_class_default(self): |
|
468 | def _checkpoint_manager_class_default(self): | |
422 | return FileCheckpointManager |
|
469 | return FileCheckpointManager | |
423 |
|
470 | |||
|
471 | def _backend_default(self): | |||
|
472 | return 'local_file' | |||
|
473 | ||||
424 | def is_hidden(self, path): |
|
474 | def is_hidden(self, path): | |
425 | """Does the API style path correspond to a hidden directory or file? |
|
475 | """Does the API style path correspond to a hidden directory or file? | |
426 |
|
476 | |||
427 | Parameters |
|
477 | Parameters | |
428 | ---------- |
|
478 | ---------- | |
429 | path : string |
|
479 | path : string | |
430 | The path to check. This is an API path (`/` separated, |
|
480 | The path to check. This is an API path (`/` separated, | |
431 | relative to root_dir). |
|
481 | relative to root_dir). | |
432 |
|
482 | |||
433 | Returns |
|
483 | Returns | |
434 | ------- |
|
484 | ------- | |
435 | hidden : bool |
|
485 | hidden : bool | |
436 | Whether the path exists and is hidden. |
|
486 | Whether the path exists and is hidden. | |
437 | """ |
|
487 | """ | |
438 | path = path.strip('/') |
|
488 | path = path.strip('/') | |
439 | os_path = self._get_os_path(path=path) |
|
489 | os_path = self._get_os_path(path=path) | |
440 | return is_hidden(os_path, self.root_dir) |
|
490 | return is_hidden(os_path, self.root_dir) | |
441 |
|
491 | |||
442 | def file_exists(self, path): |
|
492 | def file_exists(self, path): | |
443 | """Returns True if the file exists, else returns False. |
|
493 | """Returns True if the file exists, else returns False. | |
444 |
|
494 | |||
445 | API-style wrapper for os.path.isfile |
|
495 | API-style wrapper for os.path.isfile | |
446 |
|
496 | |||
447 | Parameters |
|
497 | Parameters | |
448 | ---------- |
|
498 | ---------- | |
449 | path : string |
|
499 | path : string | |
450 | The relative path to the file (with '/' as separator) |
|
500 | The relative path to the file (with '/' as separator) | |
451 |
|
501 | |||
452 | Returns |
|
502 | Returns | |
453 | ------- |
|
503 | ------- | |
454 | exists : bool |
|
504 | exists : bool | |
455 | Whether the file exists. |
|
505 | Whether the file exists. | |
456 | """ |
|
506 | """ | |
457 | path = path.strip('/') |
|
507 | path = path.strip('/') | |
458 | os_path = self._get_os_path(path) |
|
508 | os_path = self._get_os_path(path) | |
459 | return os.path.isfile(os_path) |
|
509 | return os.path.isfile(os_path) | |
460 |
|
510 | |||
461 | def dir_exists(self, path): |
|
511 | def dir_exists(self, path): | |
462 | """Does the API-style path refer to an extant directory? |
|
512 | """Does the API-style path refer to an extant directory? | |
463 |
|
513 | |||
464 | API-style wrapper for os.path.isdir |
|
514 | API-style wrapper for os.path.isdir | |
465 |
|
515 | |||
466 | Parameters |
|
516 | Parameters | |
467 | ---------- |
|
517 | ---------- | |
468 | path : string |
|
518 | path : string | |
469 | The path to check. This is an API path (`/` separated, |
|
519 | The path to check. This is an API path (`/` separated, | |
470 | relative to root_dir). |
|
520 | relative to root_dir). | |
471 |
|
521 | |||
472 | Returns |
|
522 | Returns | |
473 | ------- |
|
523 | ------- | |
474 | exists : bool |
|
524 | exists : bool | |
475 | Whether the path is indeed a directory. |
|
525 | Whether the path is indeed a directory. | |
476 | """ |
|
526 | """ | |
477 | path = path.strip('/') |
|
527 | path = path.strip('/') | |
478 | os_path = self._get_os_path(path=path) |
|
528 | os_path = self._get_os_path(path=path) | |
479 | return os.path.isdir(os_path) |
|
529 | return os.path.isdir(os_path) | |
480 |
|
530 | |||
481 | def exists(self, path): |
|
531 | def exists(self, path): | |
482 | """Returns True if the path exists, else returns False. |
|
532 | """Returns True if the path exists, else returns False. | |
483 |
|
533 | |||
484 | API-style wrapper for os.path.exists |
|
534 | API-style wrapper for os.path.exists | |
485 |
|
535 | |||
486 | Parameters |
|
536 | Parameters | |
487 | ---------- |
|
537 | ---------- | |
488 | path : string |
|
538 | path : string | |
489 | The API path to the file (with '/' as separator) |
|
539 | The API path to the file (with '/' as separator) | |
490 |
|
540 | |||
491 | Returns |
|
541 | Returns | |
492 | ------- |
|
542 | ------- | |
493 | exists : bool |
|
543 | exists : bool | |
494 | Whether the target exists. |
|
544 | Whether the target exists. | |
495 | """ |
|
545 | """ | |
496 | path = path.strip('/') |
|
546 | path = path.strip('/') | |
497 | os_path = self._get_os_path(path=path) |
|
547 | os_path = self._get_os_path(path=path) | |
498 | return os.path.exists(os_path) |
|
548 | return os.path.exists(os_path) | |
499 |
|
549 | |||
500 | def _base_model(self, path): |
|
550 | def _base_model(self, path): | |
501 | """Build the common base of a contents model""" |
|
551 | """Build the common base of a contents model""" | |
502 | os_path = self._get_os_path(path) |
|
552 | os_path = self._get_os_path(path) | |
503 | info = os.stat(os_path) |
|
553 | info = os.stat(os_path) | |
504 | last_modified = tz.utcfromtimestamp(info.st_mtime) |
|
554 | last_modified = tz.utcfromtimestamp(info.st_mtime) | |
505 | created = tz.utcfromtimestamp(info.st_ctime) |
|
555 | created = tz.utcfromtimestamp(info.st_ctime) | |
506 | # Create the base model. |
|
556 | # Create the base model. | |
507 | model = {} |
|
557 | model = {} | |
508 | model['name'] = path.rsplit('/', 1)[-1] |
|
558 | model['name'] = path.rsplit('/', 1)[-1] | |
509 | model['path'] = path |
|
559 | model['path'] = path | |
510 | model['last_modified'] = last_modified |
|
560 | model['last_modified'] = last_modified | |
511 | model['created'] = created |
|
561 | model['created'] = created | |
512 | model['content'] = None |
|
562 | model['content'] = None | |
513 | model['format'] = None |
|
563 | model['format'] = None | |
514 | model['mimetype'] = None |
|
564 | model['mimetype'] = None | |
515 | try: |
|
565 | try: | |
516 | model['writable'] = os.access(os_path, os.W_OK) |
|
566 | model['writable'] = os.access(os_path, os.W_OK) | |
517 | except OSError: |
|
567 | except OSError: | |
518 | self.log.error("Failed to check write permissions on %s", os_path) |
|
568 | self.log.error("Failed to check write permissions on %s", os_path) | |
519 | model['writable'] = False |
|
569 | model['writable'] = False | |
520 | return model |
|
570 | return model | |
521 |
|
571 | |||
522 | def _dir_model(self, path, content=True): |
|
572 | def _dir_model(self, path, content=True): | |
523 | """Build a model for a directory |
|
573 | """Build a model for a directory | |
524 |
|
574 | |||
525 | if content is requested, will include a listing of the directory |
|
575 | if content is requested, will include a listing of the directory | |
526 | """ |
|
576 | """ | |
527 | os_path = self._get_os_path(path) |
|
577 | os_path = self._get_os_path(path) | |
528 |
|
578 | |||
529 | four_o_four = u'directory does not exist: %r' % path |
|
579 | four_o_four = u'directory does not exist: %r' % path | |
530 |
|
580 | |||
531 | if not os.path.isdir(os_path): |
|
581 | if not os.path.isdir(os_path): | |
532 | raise web.HTTPError(404, four_o_four) |
|
582 | raise web.HTTPError(404, four_o_four) | |
533 | elif is_hidden(os_path, self.root_dir): |
|
583 | elif is_hidden(os_path, self.root_dir): | |
534 | self.log.info("Refusing to serve hidden directory %r, via 404 Error", |
|
584 | self.log.info("Refusing to serve hidden directory %r, via 404 Error", | |
535 | os_path |
|
585 | os_path | |
536 | ) |
|
586 | ) | |
537 | raise web.HTTPError(404, four_o_four) |
|
587 | raise web.HTTPError(404, four_o_four) | |
538 |
|
588 | |||
539 | model = self._base_model(path) |
|
589 | model = self._base_model(path) | |
540 | model['type'] = 'directory' |
|
590 | model['type'] = 'directory' | |
541 | if content: |
|
591 | if content: | |
542 | model['content'] = contents = [] |
|
592 | model['content'] = contents = [] | |
543 | os_dir = self._get_os_path(path) |
|
593 | os_dir = self._get_os_path(path) | |
544 | for name in os.listdir(os_dir): |
|
594 | for name in os.listdir(os_dir): | |
545 | os_path = os.path.join(os_dir, name) |
|
595 | os_path = os.path.join(os_dir, name) | |
546 | # skip over broken symlinks in listing |
|
596 | # skip over broken symlinks in listing | |
547 | if not os.path.exists(os_path): |
|
597 | if not os.path.exists(os_path): | |
548 | self.log.warn("%s doesn't exist", os_path) |
|
598 | self.log.warn("%s doesn't exist", os_path) | |
549 | continue |
|
599 | continue | |
550 | elif not os.path.isfile(os_path) and not os.path.isdir(os_path): |
|
600 | elif not os.path.isfile(os_path) and not os.path.isdir(os_path): | |
551 | self.log.debug("%s not a regular file", os_path) |
|
601 | self.log.debug("%s not a regular file", os_path) | |
552 | continue |
|
602 | continue | |
553 | if self.should_list(name) and not is_hidden(os_path, self.root_dir): |
|
603 | if self.should_list(name) and not is_hidden(os_path, self.root_dir): | |
554 | contents.append(self.get( |
|
604 | contents.append(self.get( | |
555 | path='%s/%s' % (path, name), |
|
605 | path='%s/%s' % (path, name), | |
556 | content=False) |
|
606 | content=False) | |
557 | ) |
|
607 | ) | |
558 |
|
608 | |||
559 | model['format'] = 'json' |
|
609 | model['format'] = 'json' | |
560 |
|
610 | |||
561 | return model |
|
611 | return model | |
562 |
|
612 | |||
563 | def _file_model(self, path, content=True, format=None): |
|
613 | def _file_model(self, path, content=True, format=None): | |
564 | """Build a model for a file |
|
614 | """Build a model for a file | |
565 |
|
615 | |||
566 | if content is requested, include the file contents. |
|
616 | if content is requested, include the file contents. | |
567 |
|
617 | |||
568 | format: |
|
618 | format: | |
569 | If 'text', the contents will be decoded as UTF-8. |
|
619 | If 'text', the contents will be decoded as UTF-8. | |
570 | If 'base64', the raw bytes contents will be encoded as base64. |
|
620 | If 'base64', the raw bytes contents will be encoded as base64. | |
571 | If not specified, try to decode as UTF-8, and fall back to base64 |
|
621 | If not specified, try to decode as UTF-8, and fall back to base64 | |
572 | """ |
|
622 | """ | |
573 | model = self._base_model(path) |
|
623 | model = self._base_model(path) | |
574 | model['type'] = 'file' |
|
624 | model['type'] = 'file' | |
575 |
|
625 | |||
576 | os_path = self._get_os_path(path) |
|
626 | os_path = self._get_os_path(path) | |
577 |
|
627 | |||
578 | if content: |
|
628 | if content: | |
579 | content, format = self._read_file(os_path, format) |
|
629 | content, format = self._read_file(os_path, format) | |
580 | default_mime = { |
|
630 | default_mime = { | |
581 | 'text': 'text/plain', |
|
631 | 'text': 'text/plain', | |
582 | 'base64': 'application/octet-stream' |
|
632 | 'base64': 'application/octet-stream' | |
583 | }[format] |
|
633 | }[format] | |
584 |
|
634 | |||
585 | model.update( |
|
635 | model.update( | |
586 | content=content, |
|
636 | content=content, | |
587 | format=format, |
|
637 | format=format, | |
588 | mimetype=mimetypes.guess_type(os_path)[0] or default_mime, |
|
638 | mimetype=mimetypes.guess_type(os_path)[0] or default_mime, | |
589 | ) |
|
639 | ) | |
590 |
|
640 | |||
591 | return model |
|
641 | return model | |
592 |
|
642 | |||
593 | def _notebook_model(self, path, content=True): |
|
643 | def _notebook_model(self, path, content=True): | |
594 | """Build a notebook model |
|
644 | """Build a notebook model | |
595 |
|
645 | |||
596 | if content is requested, the notebook content will be populated |
|
646 | if content is requested, the notebook content will be populated | |
597 | as a JSON structure (not double-serialized) |
|
647 | as a JSON structure (not double-serialized) | |
598 | """ |
|
648 | """ | |
599 | model = self._base_model(path) |
|
649 | model = self._base_model(path) | |
600 | model['type'] = 'notebook' |
|
650 | model['type'] = 'notebook' | |
601 | if content: |
|
651 | if content: | |
602 | os_path = self._get_os_path(path) |
|
652 | os_path = self._get_os_path(path) | |
603 | nb = self._read_notebook(os_path, as_version=4) |
|
653 | nb = self._read_notebook(os_path, as_version=4) | |
604 | self.mark_trusted_cells(nb, path) |
|
654 | self.mark_trusted_cells(nb, path) | |
605 | model['content'] = nb |
|
655 | model['content'] = nb | |
606 | model['format'] = 'json' |
|
656 | model['format'] = 'json' | |
607 | self.validate_notebook_model(model) |
|
657 | self.validate_notebook_model(model) | |
608 | return model |
|
658 | return model | |
609 |
|
659 | |||
610 | def get(self, path, content=True, type=None, format=None): |
|
660 | def get(self, path, content=True, type=None, format=None): | |
611 | """ Takes a path for an entity and returns its model |
|
661 | """ Takes a path for an entity and returns its model | |
612 |
|
662 | |||
613 | Parameters |
|
663 | Parameters | |
614 | ---------- |
|
664 | ---------- | |
615 | path : str |
|
665 | path : str | |
616 | the API path that describes the relative path for the target |
|
666 | the API path that describes the relative path for the target | |
617 | content : bool |
|
667 | content : bool | |
618 | Whether to include the contents in the reply |
|
668 | Whether to include the contents in the reply | |
619 | type : str, optional |
|
669 | type : str, optional | |
620 | The requested type - 'file', 'notebook', or 'directory'. |
|
670 | The requested type - 'file', 'notebook', or 'directory'. | |
621 | Will raise HTTPError 400 if the content doesn't match. |
|
671 | Will raise HTTPError 400 if the content doesn't match. | |
622 | format : str, optional |
|
672 | format : str, optional | |
623 | The requested format for file contents. 'text' or 'base64'. |
|
673 | The requested format for file contents. 'text' or 'base64'. | |
624 | Ignored if this returns a notebook or directory model. |
|
674 | Ignored if this returns a notebook or directory model. | |
625 |
|
675 | |||
626 | Returns |
|
676 | Returns | |
627 | ------- |
|
677 | ------- | |
628 | model : dict |
|
678 | model : dict | |
629 | the contents model. If content=True, returns the contents |
|
679 | the contents model. If content=True, returns the contents | |
630 | of the file or directory as well. |
|
680 | of the file or directory as well. | |
631 | """ |
|
681 | """ | |
632 | path = path.strip('/') |
|
682 | path = path.strip('/') | |
633 |
|
683 | |||
634 | if not self.exists(path): |
|
684 | if not self.exists(path): | |
635 | raise web.HTTPError(404, u'No such file or directory: %s' % path) |
|
685 | raise web.HTTPError(404, u'No such file or directory: %s' % path) | |
636 |
|
686 | |||
637 | os_path = self._get_os_path(path) |
|
687 | os_path = self._get_os_path(path) | |
638 | if os.path.isdir(os_path): |
|
688 | if os.path.isdir(os_path): | |
639 | if type not in (None, 'directory'): |
|
689 | if type not in (None, 'directory'): | |
640 | raise web.HTTPError(400, |
|
690 | raise web.HTTPError(400, | |
641 | u'%s is a directory, not a %s' % (path, type), reason='bad type') |
|
691 | u'%s is a directory, not a %s' % (path, type), reason='bad type') | |
642 | model = self._dir_model(path, content=content) |
|
692 | model = self._dir_model(path, content=content) | |
643 | elif type == 'notebook' or (type is None and path.endswith('.ipynb')): |
|
693 | elif type == 'notebook' or (type is None and path.endswith('.ipynb')): | |
644 | model = self._notebook_model(path, content=content) |
|
694 | model = self._notebook_model(path, content=content) | |
645 | else: |
|
695 | else: | |
646 | if type == 'directory': |
|
696 | if type == 'directory': | |
647 | raise web.HTTPError(400, |
|
697 | raise web.HTTPError(400, | |
648 | u'%s is not a directory', reason='bad type') |
|
698 | u'%s is not a directory', reason='bad type') | |
649 | model = self._file_model(path, content=content, format=format) |
|
699 | model = self._file_model(path, content=content, format=format) | |
650 | return model |
|
700 | return model | |
651 |
|
701 | |||
652 | def _save_directory(self, os_path, model, path=''): |
|
702 | def _save_directory(self, os_path, model, path=''): | |
653 | """create a directory""" |
|
703 | """create a directory""" | |
654 | if is_hidden(os_path, self.root_dir): |
|
704 | if is_hidden(os_path, self.root_dir): | |
655 | raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path) |
|
705 | raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path) | |
656 | if not os.path.exists(os_path): |
|
706 | if not os.path.exists(os_path): | |
657 | with self.perm_to_403(): |
|
707 | with self.perm_to_403(): | |
658 | os.mkdir(os_path) |
|
708 | os.mkdir(os_path) | |
659 | elif not os.path.isdir(os_path): |
|
709 | elif not os.path.isdir(os_path): | |
660 | raise web.HTTPError(400, u'Not a directory: %s' % (os_path)) |
|
710 | raise web.HTTPError(400, u'Not a directory: %s' % (os_path)) | |
661 | else: |
|
711 | else: | |
662 | self.log.debug("Directory %r already exists", os_path) |
|
712 | self.log.debug("Directory %r already exists", os_path) | |
663 |
|
713 | |||
664 | def save(self, model, path=''): |
|
714 | def save(self, model, path=''): | |
665 | """Save the file model and return the model with no content.""" |
|
715 | """Save the file model and return the model with no content.""" | |
666 | path = path.strip('/') |
|
716 | path = path.strip('/') | |
667 |
|
717 | |||
668 | if 'type' not in model: |
|
718 | if 'type' not in model: | |
669 | raise web.HTTPError(400, u'No file type provided') |
|
719 | raise web.HTTPError(400, u'No file type provided') | |
670 | if 'content' not in model and model['type'] != 'directory': |
|
720 | if 'content' not in model and model['type'] != 'directory': | |
671 | raise web.HTTPError(400, u'No file content provided') |
|
721 | raise web.HTTPError(400, u'No file content provided') | |
672 |
|
722 | |||
673 | self.run_pre_save_hook(model=model, path=path) |
|
723 | self.run_pre_save_hook(model=model, path=path) | |
674 |
|
724 | |||
675 | os_path = self._get_os_path(path) |
|
725 | os_path = self._get_os_path(path) | |
676 | self.log.debug("Saving %s", os_path) |
|
726 | self.log.debug("Saving %s", os_path) | |
677 | try: |
|
727 | try: | |
678 | if model['type'] == 'notebook': |
|
728 | if model['type'] == 'notebook': | |
679 | nb = nbformat.from_dict(model['content']) |
|
729 | nb = nbformat.from_dict(model['content']) | |
680 | self.check_and_sign(nb, path) |
|
730 | self.check_and_sign(nb, path) | |
681 | self._save_notebook(os_path, nb) |
|
731 | self._save_notebook(os_path, nb) | |
682 | # One checkpoint should always exist for notebooks. |
|
732 | # One checkpoint should always exist for notebooks. | |
683 | if not self.checkpoint_manager.list_checkpoints(path): |
|
733 | if not self.checkpoint_manager.list_checkpoints(path): | |
684 |
self. |
|
734 | self.create_checkpoint(path) | |
685 | nb, |
|
|||
686 | path, |
|
|||
687 | ) |
|
|||
688 | elif model['type'] == 'file': |
|
735 | elif model['type'] == 'file': | |
689 | # Missing format will be handled internally by _save_file. |
|
736 | # Missing format will be handled internally by _save_file. | |
690 | self._save_file(os_path, model['content'], model.get('format')) |
|
737 | self._save_file(os_path, model['content'], model.get('format')) | |
691 | elif model['type'] == 'directory': |
|
738 | elif model['type'] == 'directory': | |
692 | self._save_directory(os_path, model, path) |
|
739 | self._save_directory(os_path, model, path) | |
693 | else: |
|
740 | else: | |
694 | raise web.HTTPError(400, "Unhandled contents type: %s" % model['type']) |
|
741 | raise web.HTTPError(400, "Unhandled contents type: %s" % model['type']) | |
695 | except web.HTTPError: |
|
742 | except web.HTTPError: | |
696 | raise |
|
743 | raise | |
697 | except Exception as e: |
|
744 | except Exception as e: | |
698 | self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) |
|
745 | self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True) | |
699 | raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e)) |
|
746 | raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e)) | |
700 |
|
747 | |||
701 | validation_message = None |
|
748 | validation_message = None | |
702 | if model['type'] == 'notebook': |
|
749 | if model['type'] == 'notebook': | |
703 | self.validate_notebook_model(model) |
|
750 | self.validate_notebook_model(model) | |
704 | validation_message = model.get('message', None) |
|
751 | validation_message = model.get('message', None) | |
705 |
|
752 | |||
706 | model = self.get(path, content=False) |
|
753 | model = self.get(path, content=False) | |
707 | if validation_message: |
|
754 | if validation_message: | |
708 | model['message'] = validation_message |
|
755 | model['message'] = validation_message | |
709 |
|
756 | |||
710 | self.run_post_save_hook(model=model, os_path=os_path) |
|
757 | self.run_post_save_hook(model=model, os_path=os_path) | |
711 |
|
758 | |||
712 | return model |
|
759 | return model | |
713 |
|
760 | |||
714 | def delete_file(self, path): |
|
761 | def delete_file(self, path): | |
715 | """Delete file at path.""" |
|
762 | """Delete file at path.""" | |
716 | path = path.strip('/') |
|
763 | path = path.strip('/') | |
717 | os_path = self._get_os_path(path) |
|
764 | os_path = self._get_os_path(path) | |
718 | rm = os.unlink |
|
765 | rm = os.unlink | |
719 | if os.path.isdir(os_path): |
|
766 | if os.path.isdir(os_path): | |
720 | listing = os.listdir(os_path) |
|
767 | listing = os.listdir(os_path) | |
721 | # Don't delete non-empty directories. |
|
768 | # Don't delete non-empty directories. | |
722 | # A directory containing only leftover checkpoints is |
|
769 | # A directory containing only leftover checkpoints is | |
723 | # considered empty. |
|
770 | # considered empty. | |
724 | cp_dir = getattr(self.checkpoint_manager, 'checkpoint_dir', None) |
|
771 | cp_dir = getattr(self.checkpoint_manager, 'checkpoint_dir', None) | |
725 | for entry in listing: |
|
772 | for entry in listing: | |
726 | if entry != cp_dir: |
|
773 | if entry != cp_dir: | |
727 | raise web.HTTPError(400, u'Directory %s not empty' % os_path) |
|
774 | raise web.HTTPError(400, u'Directory %s not empty' % os_path) | |
728 | elif not os.path.isfile(os_path): |
|
775 | elif not os.path.isfile(os_path): | |
729 | raise web.HTTPError(404, u'File does not exist: %s' % os_path) |
|
776 | raise web.HTTPError(404, u'File does not exist: %s' % os_path) | |
730 |
|
777 | |||
731 | if os.path.isdir(os_path): |
|
778 | if os.path.isdir(os_path): | |
732 | self.log.debug("Removing directory %s", os_path) |
|
779 | self.log.debug("Removing directory %s", os_path) | |
733 | with self.perm_to_403(): |
|
780 | with self.perm_to_403(): | |
734 | shutil.rmtree(os_path) |
|
781 | shutil.rmtree(os_path) | |
735 | else: |
|
782 | else: | |
736 | self.log.debug("Unlinking file %s", os_path) |
|
783 | self.log.debug("Unlinking file %s", os_path) | |
737 | with self.perm_to_403(): |
|
784 | with self.perm_to_403(): | |
738 | rm(os_path) |
|
785 | rm(os_path) | |
739 |
|
786 | |||
740 | def rename_file(self, old_path, new_path): |
|
787 | def rename_file(self, old_path, new_path): | |
741 | """Rename a file.""" |
|
788 | """Rename a file.""" | |
742 | old_path = old_path.strip('/') |
|
789 | old_path = old_path.strip('/') | |
743 | new_path = new_path.strip('/') |
|
790 | new_path = new_path.strip('/') | |
744 | if new_path == old_path: |
|
791 | if new_path == old_path: | |
745 | return |
|
792 | return | |
746 |
|
793 | |||
747 | new_os_path = self._get_os_path(new_path) |
|
794 | new_os_path = self._get_os_path(new_path) | |
748 | old_os_path = self._get_os_path(old_path) |
|
795 | old_os_path = self._get_os_path(old_path) | |
749 |
|
796 | |||
750 | # Should we proceed with the move? |
|
797 | # Should we proceed with the move? | |
751 | if os.path.exists(new_os_path): |
|
798 | if os.path.exists(new_os_path): | |
752 | raise web.HTTPError(409, u'File already exists: %s' % new_path) |
|
799 | raise web.HTTPError(409, u'File already exists: %s' % new_path) | |
753 |
|
800 | |||
754 | # Move the file |
|
801 | # Move the file | |
755 | try: |
|
802 | try: | |
756 | with self.perm_to_403(): |
|
803 | with self.perm_to_403(): | |
757 | shutil.move(old_os_path, new_os_path) |
|
804 | shutil.move(old_os_path, new_os_path) | |
758 | except web.HTTPError: |
|
805 | except web.HTTPError: | |
759 | raise |
|
806 | raise | |
760 | except Exception as e: |
|
807 | except Exception as e: | |
761 | raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e)) |
|
808 | raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e)) | |
762 |
|
809 | |||
763 | def info_string(self): |
|
810 | def info_string(self): | |
764 | return "Serving notebooks from local directory: %s" % self.root_dir |
|
811 | return "Serving notebooks from local directory: %s" % self.root_dir | |
765 |
|
812 | |||
766 | def get_kernel_path(self, path, model=None): |
|
813 | def get_kernel_path(self, path, model=None): | |
767 | """Return the initial API path of a kernel associated with a given notebook""" |
|
814 | """Return the initial API path of a kernel associated with a given notebook""" | |
768 | if '/' in path: |
|
815 | if '/' in path: | |
769 | parent_dir = path.rsplit('/', 1)[0] |
|
816 | parent_dir = path.rsplit('/', 1)[0] | |
770 | else: |
|
817 | else: | |
771 | parent_dir = '' |
|
818 | parent_dir = '' | |
772 | return parent_dir |
|
819 | return parent_dir |
@@ -1,536 +1,539 b'' | |||||
1 | """A base class for contents managers.""" |
|
1 | """A base class for contents managers.""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | from fnmatch import fnmatch |
|
6 | from fnmatch import fnmatch | |
7 | import itertools |
|
7 | import itertools | |
8 | import json |
|
8 | import json | |
9 | import os |
|
9 | import os | |
10 | import re |
|
10 | import re | |
11 |
|
11 | |||
12 | from tornado.web import HTTPError |
|
12 | from tornado.web import HTTPError | |
13 |
|
13 | |||
14 | from IPython import nbformat |
|
|||
15 | from IPython.config.configurable import LoggingConfigurable |
|
14 | from IPython.config.configurable import LoggingConfigurable | |
16 | from IPython.nbformat import sign, validate, ValidationError |
|
15 | from IPython.nbformat import sign, validate, ValidationError | |
17 | from IPython.nbformat.v4 import new_notebook |
|
16 | from IPython.nbformat.v4 import new_notebook | |
18 | from IPython.utils.importstring import import_item |
|
17 | from IPython.utils.importstring import import_item | |
19 | from IPython.utils.traitlets import ( |
|
18 | from IPython.utils.traitlets import ( | |
20 | Any, |
|
19 | Any, | |
21 | Dict, |
|
20 | Dict, | |
22 | Instance, |
|
21 | Instance, | |
23 | List, |
|
22 | List, | |
24 | TraitError, |
|
23 | TraitError, | |
25 | Type, |
|
24 | Type, | |
26 | Unicode, |
|
25 | Unicode, | |
27 | ) |
|
26 | ) | |
28 | from IPython.utils.py3compat import string_types |
|
27 | from IPython.utils.py3compat import string_types | |
29 |
|
28 | |||
30 | copy_pat = re.compile(r'\-Copy\d*\.') |
|
29 | copy_pat = re.compile(r'\-Copy\d*\.') | |
31 |
|
30 | |||
32 |
|
31 | |||
class CheckpointManager(LoggingConfigurable):
    """
    Base class for managing checkpoints for a ContentsManager.

    Subclasses implement the storage primitives; this base class supplies
    the type dispatch and the bulk rename/delete helpers.
    """

    def create_checkpoint(self, contents_mgr, path):
        # Fetch the full model so the current content can be snapshotted.
        model = contents_mgr.get(path, content=True)
        kind = model['type']
        if kind == 'notebook':
            return self.create_notebook_checkpoint(model['content'], path)
        elif kind == 'file':
            return self.create_file_checkpoint(
                model['content'], model['format'], path,
            )

    def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
        """Restore a checkpoint."""
        # Only the type is needed here; the checkpoint supplies the content.
        kind = contents_mgr.get(path, content=False)['type']
        checkpoint_model = self.get_checkpoint(checkpoint_id, path, kind)
        contents_mgr.save(checkpoint_model, path)

    def create_file_checkpoint(self, content, format, path):
        """Create a checkpoint of the current state of a file

        Returns a checkpoint model for the new checkpoint.
        """
        raise NotImplementedError("must be implemented in a subclass")

    def create_notebook_checkpoint(self, nb, path):
        """Create a checkpoint of the current state of a file

        Returns a checkpoint model for the new checkpoint.
        """
        raise NotImplementedError("must be implemented in a subclass")

    def get_checkpoint(self, checkpoint_id, path, type):
        """Get the content of a checkpoint.

        Returns an unvalidated model with the same structure as
        the return value of ContentsManager.get
        """
        raise NotImplementedError("must be implemented in a subclass")

    def rename_checkpoint(self, checkpoint_id, old_path, new_path):
        """Rename a single checkpoint from old_path to new_path."""
        raise NotImplementedError("must be implemented in a subclass")

    def delete_checkpoint(self, checkpoint_id, path):
        """delete a checkpoint for a file"""
        raise NotImplementedError("must be implemented in a subclass")

    def list_checkpoints(self, path):
        """Return a list of checkpoints for a given file"""
        raise NotImplementedError("must be implemented in a subclass")

    def rename_all_checkpoints(self, old_path, new_path):
        """Rename all checkpoints for old_path to new_path."""
        for checkpoint in self.list_checkpoints(old_path):
            self.rename_checkpoint(checkpoint['id'], old_path, new_path)

    def delete_all_checkpoints(self, path):
        """Delete all checkpoints for the given path."""
        for checkpoint in self.list_checkpoints(path):
            self.delete_checkpoint(checkpoint['id'], path)
80 |
|
101 | |||
81 |
|
102 | |||
82 | class ContentsManager(LoggingConfigurable): |
|
103 | class ContentsManager(LoggingConfigurable): | |
83 | """Base class for serving files and directories. |
|
104 | """Base class for serving files and directories. | |
84 |
|
105 | |||
85 | This serves any text or binary file, |
|
106 | This serves any text or binary file, | |
86 | as well as directories, |
|
107 | as well as directories, | |
87 | with special handling for JSON notebook documents. |
|
108 | with special handling for JSON notebook documents. | |
88 |
|
109 | |||
89 | Most APIs take a path argument, |
|
110 | Most APIs take a path argument, | |
90 | which is always an API-style unicode path, |
|
111 | which is always an API-style unicode path, | |
91 | and always refers to a directory. |
|
112 | and always refers to a directory. | |
92 |
|
113 | |||
93 | - unicode, not url-escaped |
|
114 | - unicode, not url-escaped | |
94 | - '/'-separated |
|
115 | - '/'-separated | |
95 | - leading and trailing '/' will be stripped |
|
116 | - leading and trailing '/' will be stripped | |
96 | - if unspecified, path defaults to '', |
|
117 | - if unspecified, path defaults to '', | |
97 | indicating the root path. |
|
118 | indicating the root path. | |
98 |
|
119 | |||
99 | """ |
|
120 | """ | |
100 |
|
121 | |||
101 | notary = Instance(sign.NotebookNotary) |
|
122 | notary = Instance(sign.NotebookNotary) | |
def _notary_default(self):
    # Trait default for `notary`: parented to this manager so it inherits
    # the manager's configuration.
    return sign.NotebookNotary(parent=self)
104 |
|
125 | |||
105 | hide_globs = List(Unicode, [ |
|
126 | hide_globs = List(Unicode, [ | |
106 | u'__pycache__', '*.pyc', '*.pyo', |
|
127 | u'__pycache__', '*.pyc', '*.pyo', | |
107 | '.DS_Store', '*.so', '*.dylib', '*~', |
|
128 | '.DS_Store', '*.so', '*.dylib', '*~', | |
108 | ], config=True, help=""" |
|
129 | ], config=True, help=""" | |
109 | Glob patterns to hide in file and directory listings. |
|
130 | Glob patterns to hide in file and directory listings. | |
110 | """) |
|
131 | """) | |
111 |
|
132 | |||
112 | untitled_notebook = Unicode("Untitled", config=True, |
|
133 | untitled_notebook = Unicode("Untitled", config=True, | |
113 | help="The base name used when creating untitled notebooks." |
|
134 | help="The base name used when creating untitled notebooks." | |
114 | ) |
|
135 | ) | |
115 |
|
136 | |||
116 | untitled_file = Unicode("untitled", config=True, |
|
137 | untitled_file = Unicode("untitled", config=True, | |
117 | help="The base name used when creating untitled files." |
|
138 | help="The base name used when creating untitled files." | |
118 | ) |
|
139 | ) | |
119 |
|
140 | |||
120 | untitled_directory = Unicode("Untitled Folder", config=True, |
|
141 | untitled_directory = Unicode("Untitled Folder", config=True, | |
121 | help="The base name used when creating untitled directories." |
|
142 | help="The base name used when creating untitled directories." | |
122 | ) |
|
143 | ) | |
123 |
|
144 | |||
124 | pre_save_hook = Any(None, config=True, |
|
145 | pre_save_hook = Any(None, config=True, | |
125 | help="""Python callable or importstring thereof |
|
146 | help="""Python callable or importstring thereof | |
126 |
|
147 | |||
127 | To be called on a contents model prior to save. |
|
148 | To be called on a contents model prior to save. | |
128 |
|
149 | |||
129 | This can be used to process the structure, |
|
150 | This can be used to process the structure, | |
130 | such as removing notebook outputs or other side effects that |
|
151 | such as removing notebook outputs or other side effects that | |
131 | should not be saved. |
|
152 | should not be saved. | |
132 |
|
153 | |||
133 | It will be called as (all arguments passed by keyword): |
|
154 | It will be called as (all arguments passed by keyword): | |
134 |
|
155 | |||
135 | hook(path=path, model=model, contents_manager=self) |
|
156 | hook(path=path, model=model, contents_manager=self) | |
136 |
|
157 | |||
137 | model: the model to be saved. Includes file contents. |
|
158 | model: the model to be saved. Includes file contents. | |
138 | modifying this dict will affect the file that is stored. |
|
159 | modifying this dict will affect the file that is stored. | |
139 | path: the API path of the save destination |
|
160 | path: the API path of the save destination | |
140 | contents_manager: this ContentsManager instance |
|
161 | contents_manager: this ContentsManager instance | |
141 | """ |
|
162 | """ | |
142 | ) |
|
163 | ) | |
def _pre_save_hook_changed(self, name, old, new):
    """Resolve/validate a newly assigned pre-save hook.

    Import-string values are resolved to the object they name; any other
    truthy value must already be callable.
    """
    if not new:
        return
    if isinstance(new, string_types):
        self.pre_save_hook = import_item(self.pre_save_hook)
    elif not callable(new):
        raise TraitError("pre_save_hook must be callable")
149 |
|
170 | |||
def run_pre_save_hook(self, model, path, **kwargs):
    """Run the pre-save hook if defined, and log errors"""
    hook = self.pre_save_hook
    if not hook:
        return
    try:
        self.log.debug("Running pre-save hook on %s", path)
        hook(model=model, path=path, contents_manager=self, **kwargs)
    except Exception:
        # A broken hook must not prevent the save itself.
        self.log.error("Pre-save hook failed on %s", path, exc_info=True)
158 |
|
179 | |||
159 | checkpoint_manager_class = Type(CheckpointManager, config=True) |
|
180 | checkpoint_manager_class = Type(CheckpointManager, config=True) | |
160 | checkpoint_manager = Instance(CheckpointManager, config=True) |
|
181 | checkpoint_manager = Instance(CheckpointManager, config=True) | |
161 | checkpoint_manager_kwargs = Dict(allow_none=False, config=True) |
|
182 | checkpoint_manager_kwargs = Dict(allow_none=False, config=True) | |
|
183 | backend = Unicode(default_value="") | |||
162 |
|
184 | |||
163 | def _checkpoint_manager_default(self): |
|
185 | def _checkpoint_manager_default(self): | |
164 | return self.checkpoint_manager_class(**self.checkpoint_manager_kwargs) |
|
186 | return self.checkpoint_manager_class(**self.checkpoint_manager_kwargs) | |
165 |
|
187 | |||
166 | def _checkpoint_manager_kwargs_default(self): |
|
188 | def _checkpoint_manager_kwargs_default(self): | |
167 | return dict( |
|
189 | return dict( | |
168 | parent=self, |
|
190 | parent=self, | |
169 | log=self.log, |
|
191 | log=self.log, | |
170 | ) |
|
192 | ) | |
171 |
|
193 | |||
172 | # ContentsManager API part 1: methods that must be |
|
194 | # ContentsManager API part 1: methods that must be | |
173 | # implemented in subclasses. |
|
195 | # implemented in subclasses. | |
174 |
|
196 | |||
def dir_exists(self, path):
    """Return whether the API-style *path* names an existing directory.

    Analogous to os.path.isdir. Subclasses must override.

    Parameters
    ----------
    path : string
        The path to check

    Returns
    -------
    exists : bool
    """
    raise NotImplementedError
193 |
|
215 | |||
def is_hidden(self, path):
    """Return whether the API path refers to a hidden directory or file.

    Parameters
    ----------
    path : string
        API path ('/'-separated, relative to root dir) to check.

    Returns
    -------
    hidden : bool
    """
    raise NotImplementedError
210 |
|
232 | |||
def file_exists(self, path=''):
    """Return whether a file exists at the given API path.

    Analogous to os.path.isfile. Subclasses must override.

    Parameters
    ----------
    path : string
        The API path of the file to check ('/'-separated).

    Returns
    -------
    exists : bool
    """
    raise NotImplementedError('must be implemented in a subclass')
231 |
|
253 | |||
def exists(self, path):
    """Return whether a file or directory exists at the given API path.

    Like os.path.exists.

    Parameters
    ----------
    path : string
        API path ('/'-separated) to check.

    Returns
    -------
    exists : bool
    """
    # Preserve short-circuit semantics: the directory probe only runs
    # when the file check comes back falsy.
    found = self.file_exists(path)
    if not found:
        found = self.dir_exists(path)
    return found
|
269 | return self.file_exists(path) or self.dir_exists(path) | |
248 |
|
270 | |||
def get(self, path, content=True, type=None, format=None):
    """Return the model of a file or directory, with or without content.

    Subclasses must override.
    """
    raise NotImplementedError('must be implemented in a subclass')
252 |
|
274 | |||
def save(self, model, path):
    """Save the file or directory and return the model with no content.

    Implementations should call self.run_pre_save_hook(model=model, path=path)
    prior to writing any data. Subclasses must override.
    """
    raise NotImplementedError('must be implemented in a subclass')
260 |
|
282 | |||
def delete_file(self, path):
    """Delete the file or directory at *path*. Subclasses must override."""
    raise NotImplementedError('must be implemented in a subclass')
264 |
|
286 | |||
def rename_file(self, old_path, new_path):
    """Rename a file from old_path to new_path. Subclasses must override."""
    raise NotImplementedError('must be implemented in a subclass')
268 |
|
290 | |||
269 | # ContentsManager API part 2: methods that have useable default |
|
291 | # ContentsManager API part 2: methods that have useable default | |
270 | # implementations, but can be overridden in subclasses. |
|
292 | # implementations, but can be overridden in subclasses. | |
271 |
|
293 | |||
def delete(self, path):
    """Delete a file/directory and any associated checkpoints."""
    # Remove the primary entity first, then clean up its checkpoints.
    self.delete_file(path)
    self.checkpoint_manager.delete_all_checkpoints(path)
276 |
|
298 | |||
def rename(self, old_path, new_path):
    """Rename a file and any checkpoints associated with that file."""
    # Move the file itself first; only then bring its checkpoints along.
    self.rename_file(old_path, new_path)
    self.checkpoint_manager.rename_all_checkpoints(old_path, new_path)
281 |
|
303 | |||
def update(self, model, path):
    """Update the file's path (PATCH semantics).

    Only renames are supported: if the model carries a different 'path',
    the entity is moved and a content-free model for the new location is
    returned; otherwise the incoming model is returned unchanged.
    """
    src = path.strip('/')
    dest = model.get('path', path).strip('/')
    if src != dest:
        self.rename(src, dest)
        model = self.get(dest, content=False)
    return model
294 |
|
316 | |||
def info_string(self):
    """Return the one-line startup banner logged by log_info()."""
    return "Serving contents"
297 |
|
319 | |||
def get_kernel_path(self, path, model=None):
    """Return the API path for the kernel

    KernelManagers can turn this value into a filesystem path,
    or ignore it altogether.

    This default starts kernels in the notebook server's own directory;
    FileContentsManager overrides it to use the notebook's directory.
    """
    return ''
309 |
|
331 | |||
def increment_filename(self, filename, path='', insert=''):
    """Increment a filename until it is unique.

    Parameters
    ----------
    filename : unicode
        The name of a file, including extension
    path : unicode
        The API path of the target's directory
    insert : unicode
        Text placed between the basename and the counter.

    Returns
    -------
    name : unicode
        A filename that is unique, based on the input filename.
    """
    path = path.strip('/')
    basename, ext = os.path.splitext(filename)
    counter = 0
    while True:
        # First attempt uses the name unchanged; later ones append
        # insert+counter (e.g. "-Copy1") before the extension.
        suffix = '{}{}'.format(insert, counter) if counter else ''
        name = u'{basename}{insert}{ext}'.format(
            basename=basename, insert=suffix, ext=ext)
        if not self.exists(u'{}/{}'.format(path, name)):
            return name
        counter += 1
337 |
|
359 | |||
def validate_notebook_model(self, model):
    """Attach a validation-failure message to *model* if its content is invalid."""
    try:
        validate(model['content'])
    except ValidationError as e:
        # default=... keeps json.dumps from choking on non-serializable nodes.
        detail = json.dumps(e.instance, indent=1, default=lambda obj: '<UNKNOWN>')
        model['message'] = u'Notebook Validation failed: {}:\n{}'.format(
            e.message, detail,
        )
    return model
347 |
|
369 | |||
def new_untitled(self, path='', type='', ext=''):
    """Create a new untitled file or directory in path

    path must be a directory

    File extension can be specified.

    Use `new` to create files with a fully specified path (including filename).
    """
    path = path.strip('/')
    if not self.dir_exists(path):
        raise HTTPError(404, 'No such directory: %s' % path)

    model = {}
    if type:
        model['type'] = type
    # Infer a type from the extension when none was given.
    model.setdefault('type', 'notebook' if ext == '.ipynb' else 'file')

    insert = ''
    kind = model['type']
    if kind == 'directory':
        untitled = self.untitled_directory
        insert = ' '
    elif kind == 'notebook':
        untitled = self.untitled_notebook
        ext = '.ipynb'
    elif kind == 'file':
        untitled = self.untitled_file
    else:
        raise HTTPError(400, "Unexpected model type: %r" % kind)

    name = self.increment_filename(untitled + ext, path, insert=insert)
    path = u'{0}/{1}'.format(path, name)
    return self.new(model, path)
385 |
|
407 | |||
def new(self, model=None, path=''):
    """Create a new file or directory and return its model with no content.

    To create a new untitled entity in a directory, use `new_untitled`.
    """
    path = path.strip('/')
    model = {} if model is None else model

    # A .ipynb path implies a notebook; everything else defaults to a file.
    model.setdefault('type', 'notebook' if path.endswith('.ipynb') else 'file')

    # No content and not a directory: fill out a new-file model.
    if 'content' not in model and model['type'] != 'directory':
        if model['type'] == 'notebook':
            model['content'] = new_notebook()
            model['format'] = 'json'
        else:
            model['content'] = ''
            model['type'] = 'file'
            model['format'] = 'text'

    return self.save(model, path)
412 |
|
434 | |||
def copy(self, from_path, to_path=None):
    """Copy an existing file and return its new model.

    If to_path not specified, it will be the parent directory of from_path.
    If to_path is a directory, filename will increment `from_path-Copy#.ext`.

    from_path must be a full path to a file.
    """
    path = from_path.strip('/')
    if to_path is not None:
        to_path = to_path.strip('/')

    # rpartition yields ('', '', name) for a bare filename at the root.
    from_dir, _, from_name = path.rpartition('/')

    model = self.get(path)
    model.pop('path', None)
    model.pop('name', None)
    if model['type'] == 'directory':
        raise HTTPError(400, "Can't copy directories")

    if to_path is None:
        to_path = from_dir
    if self.dir_exists(to_path):
        # Strip any existing "-CopyN." marker before incrementing.
        base = copy_pat.sub(u'.', from_name)
        to_name = self.increment_filename(base, to_path, insert='-Copy')
        to_path = u'{0}/{1}'.format(to_path, to_name)

    return self.save(model, to_path)
446 |
|
468 | |||
447 | def log_info(self): |
|
469 | def log_info(self): | |
448 | self.log.info(self.info_string()) |
|
470 | self.log.info(self.info_string()) | |
449 |
|
471 | |||
450 | def trust_notebook(self, path): |
|
472 | def trust_notebook(self, path): | |
451 | """Explicitly trust a notebook |
|
473 | """Explicitly trust a notebook | |
452 |
|
474 | |||
453 | Parameters |
|
475 | Parameters | |
454 | ---------- |
|
476 | ---------- | |
455 | path : string |
|
477 | path : string | |
456 | The path of a notebook |
|
478 | The path of a notebook | |
457 | """ |
|
479 | """ | |
458 | model = self.get(path) |
|
480 | model = self.get(path) | |
459 | nb = model['content'] |
|
481 | nb = model['content'] | |
460 | self.log.warn("Trusting notebook %s", path) |
|
482 | self.log.warn("Trusting notebook %s", path) | |
461 | self.notary.mark_cells(nb, True) |
|
483 | self.notary.mark_cells(nb, True) | |
462 | self.save(model, path) |
|
484 | self.save(model, path) | |
463 |
|
485 | |||
464 | def check_and_sign(self, nb, path=''): |
|
486 | def check_and_sign(self, nb, path=''): | |
465 | """Check for trusted cells, and sign the notebook. |
|
487 | """Check for trusted cells, and sign the notebook. | |
466 |
|
488 | |||
467 | Called as a part of saving notebooks. |
|
489 | Called as a part of saving notebooks. | |
468 |
|
490 | |||
469 | Parameters |
|
491 | Parameters | |
470 | ---------- |
|
492 | ---------- | |
471 | nb : dict |
|
493 | nb : dict | |
472 | The notebook dict |
|
494 | The notebook dict | |
473 | path : string |
|
495 | path : string | |
474 | The notebook's path (for logging) |
|
496 | The notebook's path (for logging) | |
475 | """ |
|
497 | """ | |
476 | if self.notary.check_cells(nb): |
|
498 | if self.notary.check_cells(nb): | |
477 | self.notary.sign(nb) |
|
499 | self.notary.sign(nb) | |
478 | else: |
|
500 | else: | |
479 | self.log.warn("Saving untrusted notebook %s", path) |
|
501 | self.log.warn("Saving untrusted notebook %s", path) | |
480 |
|
502 | |||
481 | def mark_trusted_cells(self, nb, path=''): |
|
503 | def mark_trusted_cells(self, nb, path=''): | |
482 | """Mark cells as trusted if the notebook signature matches. |
|
504 | """Mark cells as trusted if the notebook signature matches. | |
483 |
|
505 | |||
484 | Called as a part of loading notebooks. |
|
506 | Called as a part of loading notebooks. | |
485 |
|
507 | |||
486 | Parameters |
|
508 | Parameters | |
487 | ---------- |
|
509 | ---------- | |
488 | nb : dict |
|
510 | nb : dict | |
489 | The notebook object (in current nbformat) |
|
511 | The notebook object (in current nbformat) | |
490 | path : string |
|
512 | path : string | |
491 | The notebook's path (for logging) |
|
513 | The notebook's path (for logging) | |
492 | """ |
|
514 | """ | |
493 | trusted = self.notary.check_signature(nb) |
|
515 | trusted = self.notary.check_signature(nb) | |
494 | if not trusted: |
|
516 | if not trusted: | |
495 | self.log.warn("Notebook %s is not trusted", path) |
|
517 | self.log.warn("Notebook %s is not trusted", path) | |
496 | self.notary.mark_cells(nb, trusted) |
|
518 | self.notary.mark_cells(nb, trusted) | |
497 |
|
519 | |||
498 | def should_list(self, name): |
|
520 | def should_list(self, name): | |
499 | """Should this file/directory name be displayed in a listing?""" |
|
521 | """Should this file/directory name be displayed in a listing?""" | |
500 | return not any(fnmatch(name, glob) for glob in self.hide_globs) |
|
522 | return not any(fnmatch(name, glob) for glob in self.hide_globs) | |
501 |
|
523 | |||
502 | # Part 3: Checkpoints API |
|
524 | # Part 3: Checkpoints API | |
503 | def create_checkpoint(self, path): |
|
525 | def create_checkpoint(self, path): | |
504 | """Create a checkpoint.""" |
|
526 | """Create a checkpoint.""" | |
505 | model = self.get(path, content=True) |
|
527 | return self.checkpoint_manager.create_checkpoint(self, path) | |
506 | type = model['type'] |
|
|||
507 | if type == 'notebook': |
|
|||
508 | return self.checkpoint_manager.create_notebook_checkpoint( |
|
|||
509 | model['content'], |
|
|||
510 | path, |
|
|||
511 | ) |
|
|||
512 | elif type == 'file': |
|
|||
513 | return self.checkpoint_manager.create_file_checkpoint( |
|
|||
514 | model['content'], |
|
|||
515 | model['format'], |
|
|||
516 | path, |
|
|||
517 | ) |
|
|||
518 |
|
||||
519 | def list_checkpoints(self, path): |
|
|||
520 | return self.checkpoint_manager.list_checkpoints(path) |
|
|||
521 |
|
528 | |||
522 | def restore_checkpoint(self, checkpoint_id, path): |
|
529 | def restore_checkpoint(self, checkpoint_id, path): | |
523 | """ |
|
530 | """ | |
524 | Restore a checkpoint. |
|
531 | Restore a checkpoint. | |
525 | """ |
|
532 | """ | |
526 | return self.save( |
|
533 | self.checkpoint_manager.restore_checkpoint(self, checkpoint_id, path) | |
527 | model=self.checkpoint_manager.get_checkpoint( |
|
534 | ||
528 | checkpoint_id, |
|
535 | def list_checkpoints(self, path): | |
529 | path, |
|
536 | return self.checkpoint_manager.list_checkpoints(path) | |
530 | self.get(path, content=False)['type'] |
|
|||
531 | ), |
|
|||
532 | path=path, |
|
|||
533 | ) |
|
|||
534 |
|
537 | |||
535 | def delete_checkpoint(self, checkpoint_id, path): |
|
538 | def delete_checkpoint(self, checkpoint_id, path): | |
536 | return self.checkpoint_manager.delete_checkpoint(checkpoint_id, path) |
|
539 | return self.checkpoint_manager.delete_checkpoint(checkpoint_id, path) |
@@ -1,616 +1,638 b'' | |||||
1 | # coding: utf-8 |
|
1 | # coding: utf-8 | |
2 | """Test the contents webservice API.""" |
|
2 | """Test the contents webservice API.""" | |
3 |
|
3 | |||
4 | import base64 |
|
4 | import base64 | |
5 | from contextlib import contextmanager |
|
5 | from contextlib import contextmanager | |
6 | import io |
|
6 | import io | |
7 | import json |
|
7 | import json | |
8 | import os |
|
8 | import os | |
9 | import shutil |
|
9 | import shutil | |
10 | from unicodedata import normalize |
|
10 | from unicodedata import normalize | |
11 |
|
11 | |||
12 | pjoin = os.path.join |
|
12 | pjoin = os.path.join | |
13 |
|
13 | |||
14 | import requests |
|
14 | import requests | |
15 |
|
15 | |||
16 | from IPython.html.utils import url_path_join, url_escape, to_os_path |
|
16 | from IPython.html.utils import url_path_join, url_escape, to_os_path | |
17 | from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error |
|
17 | from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error | |
18 | from IPython.nbformat import read, write, from_dict |
|
18 | from IPython.nbformat import read, write, from_dict | |
19 | from IPython.nbformat.v4 import ( |
|
19 | from IPython.nbformat.v4 import ( | |
20 | new_notebook, new_markdown_cell, |
|
20 | new_notebook, new_markdown_cell, | |
21 | ) |
|
21 | ) | |
22 | from IPython.nbformat import v2 |
|
22 | from IPython.nbformat import v2 | |
23 | from IPython.utils import py3compat |
|
23 | from IPython.utils import py3compat | |
24 | from IPython.utils.data import uniq_stable |
|
24 | from IPython.utils.data import uniq_stable | |
25 | from IPython.utils.tempdir import TemporaryDirectory |
|
25 | from IPython.utils.tempdir import TemporaryDirectory | |
26 |
|
26 | |||
27 |
|
27 | |||
28 | def notebooks_only(dir_model): |
|
28 | def notebooks_only(dir_model): | |
29 | return [nb for nb in dir_model['content'] if nb['type']=='notebook'] |
|
29 | return [nb for nb in dir_model['content'] if nb['type']=='notebook'] | |
30 |
|
30 | |||
31 | def dirs_only(dir_model): |
|
31 | def dirs_only(dir_model): | |
32 | return [x for x in dir_model['content'] if x['type']=='directory'] |
|
32 | return [x for x in dir_model['content'] if x['type']=='directory'] | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | class API(object): |
|
35 | class API(object): | |
36 | """Wrapper for contents API calls.""" |
|
36 | """Wrapper for contents API calls.""" | |
37 | def __init__(self, base_url): |
|
37 | def __init__(self, base_url): | |
38 | self.base_url = base_url |
|
38 | self.base_url = base_url | |
39 |
|
39 | |||
40 | def _req(self, verb, path, body=None, params=None): |
|
40 | def _req(self, verb, path, body=None, params=None): | |
41 | response = requests.request(verb, |
|
41 | response = requests.request(verb, | |
42 | url_path_join(self.base_url, 'api/contents', path), |
|
42 | url_path_join(self.base_url, 'api/contents', path), | |
43 | data=body, params=params, |
|
43 | data=body, params=params, | |
44 | ) |
|
44 | ) | |
45 | response.raise_for_status() |
|
45 | response.raise_for_status() | |
46 | return response |
|
46 | return response | |
47 |
|
47 | |||
48 | def list(self, path='/'): |
|
48 | def list(self, path='/'): | |
49 | return self._req('GET', path) |
|
49 | return self._req('GET', path) | |
50 |
|
50 | |||
51 | def read(self, path, type=None, format=None): |
|
51 | def read(self, path, type=None, format=None): | |
52 | params = {} |
|
52 | params = {} | |
53 | if type is not None: |
|
53 | if type is not None: | |
54 | params['type'] = type |
|
54 | params['type'] = type | |
55 | if format is not None: |
|
55 | if format is not None: | |
56 | params['format'] = format |
|
56 | params['format'] = format | |
57 | return self._req('GET', path, params=params) |
|
57 | return self._req('GET', path, params=params) | |
58 |
|
58 | |||
59 | def create_untitled(self, path='/', ext='.ipynb'): |
|
59 | def create_untitled(self, path='/', ext='.ipynb'): | |
60 | body = None |
|
60 | body = None | |
61 | if ext: |
|
61 | if ext: | |
62 | body = json.dumps({'ext': ext}) |
|
62 | body = json.dumps({'ext': ext}) | |
63 | return self._req('POST', path, body) |
|
63 | return self._req('POST', path, body) | |
64 |
|
64 | |||
65 | def mkdir_untitled(self, path='/'): |
|
65 | def mkdir_untitled(self, path='/'): | |
66 | return self._req('POST', path, json.dumps({'type': 'directory'})) |
|
66 | return self._req('POST', path, json.dumps({'type': 'directory'})) | |
67 |
|
67 | |||
68 | def copy(self, copy_from, path='/'): |
|
68 | def copy(self, copy_from, path='/'): | |
69 | body = json.dumps({'copy_from':copy_from}) |
|
69 | body = json.dumps({'copy_from':copy_from}) | |
70 | return self._req('POST', path, body) |
|
70 | return self._req('POST', path, body) | |
71 |
|
71 | |||
72 | def create(self, path='/'): |
|
72 | def create(self, path='/'): | |
73 | return self._req('PUT', path) |
|
73 | return self._req('PUT', path) | |
74 |
|
74 | |||
75 | def upload(self, path, body): |
|
75 | def upload(self, path, body): | |
76 | return self._req('PUT', path, body) |
|
76 | return self._req('PUT', path, body) | |
77 |
|
77 | |||
78 | def mkdir(self, path='/'): |
|
78 | def mkdir(self, path='/'): | |
79 | return self._req('PUT', path, json.dumps({'type': 'directory'})) |
|
79 | return self._req('PUT', path, json.dumps({'type': 'directory'})) | |
80 |
|
80 | |||
81 | def copy_put(self, copy_from, path='/'): |
|
81 | def copy_put(self, copy_from, path='/'): | |
82 | body = json.dumps({'copy_from':copy_from}) |
|
82 | body = json.dumps({'copy_from':copy_from}) | |
83 | return self._req('PUT', path, body) |
|
83 | return self._req('PUT', path, body) | |
84 |
|
84 | |||
85 | def save(self, path, body): |
|
85 | def save(self, path, body): | |
86 | return self._req('PUT', path, body) |
|
86 | return self._req('PUT', path, body) | |
87 |
|
87 | |||
88 | def delete(self, path='/'): |
|
88 | def delete(self, path='/'): | |
89 | return self._req('DELETE', path) |
|
89 | return self._req('DELETE', path) | |
90 |
|
90 | |||
91 | def rename(self, path, new_path): |
|
91 | def rename(self, path, new_path): | |
92 | body = json.dumps({'path': new_path}) |
|
92 | body = json.dumps({'path': new_path}) | |
93 | return self._req('PATCH', path, body) |
|
93 | return self._req('PATCH', path, body) | |
94 |
|
94 | |||
95 | def get_checkpoints(self, path): |
|
95 | def get_checkpoints(self, path): | |
96 | return self._req('GET', url_path_join(path, 'checkpoints')) |
|
96 | return self._req('GET', url_path_join(path, 'checkpoints')) | |
97 |
|
97 | |||
98 | def new_checkpoint(self, path): |
|
98 | def new_checkpoint(self, path): | |
99 | return self._req('POST', url_path_join(path, 'checkpoints')) |
|
99 | return self._req('POST', url_path_join(path, 'checkpoints')) | |
100 |
|
100 | |||
101 | def restore_checkpoint(self, path, checkpoint_id): |
|
101 | def restore_checkpoint(self, path, checkpoint_id): | |
102 | return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id)) |
|
102 | return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id)) | |
103 |
|
103 | |||
104 | def delete_checkpoint(self, path, checkpoint_id): |
|
104 | def delete_checkpoint(self, path, checkpoint_id): | |
105 | return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id)) |
|
105 | return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id)) | |
106 |
|
106 | |||
107 | class APITest(NotebookTestBase): |
|
107 | class APITest(NotebookTestBase): | |
108 | """Test the kernels web service API""" |
|
108 | """Test the kernels web service API""" | |
109 | dirs_nbs = [('', 'inroot'), |
|
109 | dirs_nbs = [('', 'inroot'), | |
110 | ('Directory with spaces in', 'inspace'), |
|
110 | ('Directory with spaces in', 'inspace'), | |
111 | (u'unicodΓ©', 'innonascii'), |
|
111 | (u'unicodΓ©', 'innonascii'), | |
112 | ('foo', 'a'), |
|
112 | ('foo', 'a'), | |
113 | ('foo', 'b'), |
|
113 | ('foo', 'b'), | |
114 | ('foo', 'name with spaces'), |
|
114 | ('foo', 'name with spaces'), | |
115 | ('foo', u'unicodΓ©'), |
|
115 | ('foo', u'unicodΓ©'), | |
116 | ('foo/bar', 'baz'), |
|
116 | ('foo/bar', 'baz'), | |
117 | ('ordering', 'A'), |
|
117 | ('ordering', 'A'), | |
118 | ('ordering', 'b'), |
|
118 | ('ordering', 'b'), | |
119 | ('ordering', 'C'), |
|
119 | ('ordering', 'C'), | |
120 | (u'Γ₯ b', u'Γ§ d'), |
|
120 | (u'Γ₯ b', u'Γ§ d'), | |
121 | ] |
|
121 | ] | |
122 | hidden_dirs = ['.hidden', '__pycache__'] |
|
122 | hidden_dirs = ['.hidden', '__pycache__'] | |
123 |
|
123 | |||
124 | # Don't include root dir. |
|
124 | # Don't include root dir. | |
125 | dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]]) |
|
125 | dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]]) | |
126 | top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs} |
|
126 | top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs} | |
127 |
|
127 | |||
128 | @staticmethod |
|
128 | @staticmethod | |
129 | def _blob_for_name(name): |
|
129 | def _blob_for_name(name): | |
130 | return name.encode('utf-8') + b'\xFF' |
|
130 | return name.encode('utf-8') + b'\xFF' | |
131 |
|
131 | |||
132 | @staticmethod |
|
132 | @staticmethod | |
133 | def _txt_for_name(name): |
|
133 | def _txt_for_name(name): | |
134 | return u'%s text file' % name |
|
134 | return u'%s text file' % name | |
135 |
|
135 | |||
136 | def to_os_path(self, api_path): |
|
136 | def to_os_path(self, api_path): | |
137 | return to_os_path(api_path, root=self.notebook_dir.name) |
|
137 | return to_os_path(api_path, root=self.notebook_dir.name) | |
138 |
|
138 | |||
139 | def make_dir(self, api_path): |
|
139 | def make_dir(self, api_path): | |
140 | """Create a directory at api_path""" |
|
140 | """Create a directory at api_path""" | |
141 | os_path = self.to_os_path(api_path) |
|
141 | os_path = self.to_os_path(api_path) | |
142 | try: |
|
142 | try: | |
143 | os.makedirs(os_path) |
|
143 | os.makedirs(os_path) | |
144 | except OSError: |
|
144 | except OSError: | |
145 | print("Directory already exists: %r" % os_path) |
|
145 | print("Directory already exists: %r" % os_path) | |
146 |
|
146 | |||
147 | def make_txt(self, api_path, txt): |
|
147 | def make_txt(self, api_path, txt): | |
148 | """Make a text file at a given api_path""" |
|
148 | """Make a text file at a given api_path""" | |
149 | os_path = self.to_os_path(api_path) |
|
149 | os_path = self.to_os_path(api_path) | |
150 | with io.open(os_path, 'w', encoding='utf-8') as f: |
|
150 | with io.open(os_path, 'w', encoding='utf-8') as f: | |
151 | f.write(txt) |
|
151 | f.write(txt) | |
152 |
|
152 | |||
153 | def make_blob(self, api_path, blob): |
|
153 | def make_blob(self, api_path, blob): | |
154 | """Make a binary file at a given api_path""" |
|
154 | """Make a binary file at a given api_path""" | |
155 | os_path = self.to_os_path(api_path) |
|
155 | os_path = self.to_os_path(api_path) | |
156 | with io.open(os_path, 'wb') as f: |
|
156 | with io.open(os_path, 'wb') as f: | |
157 | f.write(blob) |
|
157 | f.write(blob) | |
158 |
|
158 | |||
159 | def make_nb(self, api_path, nb): |
|
159 | def make_nb(self, api_path, nb): | |
160 | """Make a notebook file at a given api_path""" |
|
160 | """Make a notebook file at a given api_path""" | |
161 | os_path = self.to_os_path(api_path) |
|
161 | os_path = self.to_os_path(api_path) | |
162 |
|
162 | |||
163 | with io.open(os_path, 'w', encoding='utf-8') as f: |
|
163 | with io.open(os_path, 'w', encoding='utf-8') as f: | |
164 | write(nb, f, version=4) |
|
164 | write(nb, f, version=4) | |
165 |
|
165 | |||
166 | def delete_dir(self, api_path): |
|
166 | def delete_dir(self, api_path): | |
167 | """Delete a directory at api_path, removing any contents.""" |
|
167 | """Delete a directory at api_path, removing any contents.""" | |
168 | os_path = self.to_os_path(api_path) |
|
168 | os_path = self.to_os_path(api_path) | |
169 | shutil.rmtree(os_path, ignore_errors=True) |
|
169 | shutil.rmtree(os_path, ignore_errors=True) | |
170 |
|
170 | |||
171 | def delete_file(self, api_path): |
|
171 | def delete_file(self, api_path): | |
172 | """Delete a file at the given path if it exists.""" |
|
172 | """Delete a file at the given path if it exists.""" | |
173 | if self.isfile(api_path): |
|
173 | if self.isfile(api_path): | |
174 | os.unlink(self.to_os_path(api_path)) |
|
174 | os.unlink(self.to_os_path(api_path)) | |
175 |
|
175 | |||
176 | def isfile(self, api_path): |
|
176 | def isfile(self, api_path): | |
177 | return os.path.isfile(self.to_os_path(api_path)) |
|
177 | return os.path.isfile(self.to_os_path(api_path)) | |
178 |
|
178 | |||
179 | def isdir(self, api_path): |
|
179 | def isdir(self, api_path): | |
180 | return os.path.isdir(self.to_os_path(api_path)) |
|
180 | return os.path.isdir(self.to_os_path(api_path)) | |
181 |
|
181 | |||
182 | def setUp(self): |
|
182 | def setUp(self): | |
183 |
|
183 | |||
184 | for d in (self.dirs + self.hidden_dirs): |
|
184 | for d in (self.dirs + self.hidden_dirs): | |
185 | self.make_dir(d) |
|
185 | self.make_dir(d) | |
186 |
|
186 | |||
187 | for d, name in self.dirs_nbs: |
|
187 | for d, name in self.dirs_nbs: | |
188 | # create a notebook |
|
188 | # create a notebook | |
189 | nb = new_notebook() |
|
189 | nb = new_notebook() | |
190 | self.make_nb(u'{}/{}.ipynb'.format(d, name), nb) |
|
190 | self.make_nb(u'{}/{}.ipynb'.format(d, name), nb) | |
191 |
|
191 | |||
192 | # create a text file |
|
192 | # create a text file | |
193 | txt = self._txt_for_name(name) |
|
193 | txt = self._txt_for_name(name) | |
194 | self.make_txt(u'{}/{}.txt'.format(d, name), txt) |
|
194 | self.make_txt(u'{}/{}.txt'.format(d, name), txt) | |
195 |
|
195 | |||
196 | # create a binary file |
|
196 | # create a binary file | |
197 | blob = self._blob_for_name(name) |
|
197 | blob = self._blob_for_name(name) | |
198 | self.make_blob(u'{}/{}.blob'.format(d, name), blob) |
|
198 | self.make_blob(u'{}/{}.blob'.format(d, name), blob) | |
199 |
|
199 | |||
200 | self.api = API(self.base_url()) |
|
200 | self.api = API(self.base_url()) | |
201 |
|
201 | |||
202 | def tearDown(self): |
|
202 | def tearDown(self): | |
203 | for dname in (list(self.top_level_dirs) + self.hidden_dirs): |
|
203 | for dname in (list(self.top_level_dirs) + self.hidden_dirs): | |
204 | self.delete_dir(dname) |
|
204 | self.delete_dir(dname) | |
205 | self.delete_file('inroot.ipynb') |
|
205 | self.delete_file('inroot.ipynb') | |
206 |
|
206 | |||
207 | def test_list_notebooks(self): |
|
207 | def test_list_notebooks(self): | |
208 | nbs = notebooks_only(self.api.list().json()) |
|
208 | nbs = notebooks_only(self.api.list().json()) | |
209 | self.assertEqual(len(nbs), 1) |
|
209 | self.assertEqual(len(nbs), 1) | |
210 | self.assertEqual(nbs[0]['name'], 'inroot.ipynb') |
|
210 | self.assertEqual(nbs[0]['name'], 'inroot.ipynb') | |
211 |
|
211 | |||
212 | nbs = notebooks_only(self.api.list('/Directory with spaces in/').json()) |
|
212 | nbs = notebooks_only(self.api.list('/Directory with spaces in/').json()) | |
213 | self.assertEqual(len(nbs), 1) |
|
213 | self.assertEqual(len(nbs), 1) | |
214 | self.assertEqual(nbs[0]['name'], 'inspace.ipynb') |
|
214 | self.assertEqual(nbs[0]['name'], 'inspace.ipynb') | |
215 |
|
215 | |||
216 | nbs = notebooks_only(self.api.list(u'/unicodΓ©/').json()) |
|
216 | nbs = notebooks_only(self.api.list(u'/unicodΓ©/').json()) | |
217 | self.assertEqual(len(nbs), 1) |
|
217 | self.assertEqual(len(nbs), 1) | |
218 | self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') |
|
218 | self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') | |
219 | self.assertEqual(nbs[0]['path'], u'unicodΓ©/innonascii.ipynb') |
|
219 | self.assertEqual(nbs[0]['path'], u'unicodΓ©/innonascii.ipynb') | |
220 |
|
220 | |||
221 | nbs = notebooks_only(self.api.list('/foo/bar/').json()) |
|
221 | nbs = notebooks_only(self.api.list('/foo/bar/').json()) | |
222 | self.assertEqual(len(nbs), 1) |
|
222 | self.assertEqual(len(nbs), 1) | |
223 | self.assertEqual(nbs[0]['name'], 'baz.ipynb') |
|
223 | self.assertEqual(nbs[0]['name'], 'baz.ipynb') | |
224 | self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb') |
|
224 | self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb') | |
225 |
|
225 | |||
226 | nbs = notebooks_only(self.api.list('foo').json()) |
|
226 | nbs = notebooks_only(self.api.list('foo').json()) | |
227 | self.assertEqual(len(nbs), 4) |
|
227 | self.assertEqual(len(nbs), 4) | |
228 | nbnames = { normalize('NFC', n['name']) for n in nbs } |
|
228 | nbnames = { normalize('NFC', n['name']) for n in nbs } | |
229 | expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodΓ©.ipynb'] |
|
229 | expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodΓ©.ipynb'] | |
230 | expected = { normalize('NFC', name) for name in expected } |
|
230 | expected = { normalize('NFC', name) for name in expected } | |
231 | self.assertEqual(nbnames, expected) |
|
231 | self.assertEqual(nbnames, expected) | |
232 |
|
232 | |||
233 | nbs = notebooks_only(self.api.list('ordering').json()) |
|
233 | nbs = notebooks_only(self.api.list('ordering').json()) | |
234 | nbnames = [n['name'] for n in nbs] |
|
234 | nbnames = [n['name'] for n in nbs] | |
235 | expected = ['A.ipynb', 'b.ipynb', 'C.ipynb'] |
|
235 | expected = ['A.ipynb', 'b.ipynb', 'C.ipynb'] | |
236 | self.assertEqual(nbnames, expected) |
|
236 | self.assertEqual(nbnames, expected) | |
237 |
|
237 | |||
238 | def test_list_dirs(self): |
|
238 | def test_list_dirs(self): | |
239 | dirs = dirs_only(self.api.list().json()) |
|
239 | dirs = dirs_only(self.api.list().json()) | |
240 | dir_names = {normalize('NFC', d['name']) for d in dirs} |
|
240 | dir_names = {normalize('NFC', d['name']) for d in dirs} | |
241 | self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs |
|
241 | self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs | |
242 |
|
242 | |||
243 | def test_list_nonexistant_dir(self): |
|
243 | def test_list_nonexistant_dir(self): | |
244 | with assert_http_error(404): |
|
244 | with assert_http_error(404): | |
245 | self.api.list('nonexistant') |
|
245 | self.api.list('nonexistant') | |
246 |
|
246 | |||
247 | def test_get_nb_contents(self): |
|
247 | def test_get_nb_contents(self): | |
248 | for d, name in self.dirs_nbs: |
|
248 | for d, name in self.dirs_nbs: | |
249 | path = url_path_join(d, name + '.ipynb') |
|
249 | path = url_path_join(d, name + '.ipynb') | |
250 | nb = self.api.read(path).json() |
|
250 | nb = self.api.read(path).json() | |
251 | self.assertEqual(nb['name'], u'%s.ipynb' % name) |
|
251 | self.assertEqual(nb['name'], u'%s.ipynb' % name) | |
252 | self.assertEqual(nb['path'], path) |
|
252 | self.assertEqual(nb['path'], path) | |
253 | self.assertEqual(nb['type'], 'notebook') |
|
253 | self.assertEqual(nb['type'], 'notebook') | |
254 | self.assertIn('content', nb) |
|
254 | self.assertIn('content', nb) | |
255 | self.assertEqual(nb['format'], 'json') |
|
255 | self.assertEqual(nb['format'], 'json') | |
256 | self.assertIn('content', nb) |
|
256 | self.assertIn('content', nb) | |
257 | self.assertIn('metadata', nb['content']) |
|
257 | self.assertIn('metadata', nb['content']) | |
258 | self.assertIsInstance(nb['content']['metadata'], dict) |
|
258 | self.assertIsInstance(nb['content']['metadata'], dict) | |
259 |
|
259 | |||
260 | def test_get_contents_no_such_file(self): |
|
260 | def test_get_contents_no_such_file(self): | |
261 | # Name that doesn't exist - should be a 404 |
|
261 | # Name that doesn't exist - should be a 404 | |
262 | with assert_http_error(404): |
|
262 | with assert_http_error(404): | |
263 | self.api.read('foo/q.ipynb') |
|
263 | self.api.read('foo/q.ipynb') | |
264 |
|
264 | |||
265 | def test_get_text_file_contents(self): |
|
265 | def test_get_text_file_contents(self): | |
266 | for d, name in self.dirs_nbs: |
|
266 | for d, name in self.dirs_nbs: | |
267 | path = url_path_join(d, name + '.txt') |
|
267 | path = url_path_join(d, name + '.txt') | |
268 | model = self.api.read(path).json() |
|
268 | model = self.api.read(path).json() | |
269 | self.assertEqual(model['name'], u'%s.txt' % name) |
|
269 | self.assertEqual(model['name'], u'%s.txt' % name) | |
270 | self.assertEqual(model['path'], path) |
|
270 | self.assertEqual(model['path'], path) | |
271 | self.assertIn('content', model) |
|
271 | self.assertIn('content', model) | |
272 | self.assertEqual(model['format'], 'text') |
|
272 | self.assertEqual(model['format'], 'text') | |
273 | self.assertEqual(model['type'], 'file') |
|
273 | self.assertEqual(model['type'], 'file') | |
274 | self.assertEqual(model['content'], self._txt_for_name(name)) |
|
274 | self.assertEqual(model['content'], self._txt_for_name(name)) | |
275 |
|
275 | |||
276 | # Name that doesn't exist - should be a 404 |
|
276 | # Name that doesn't exist - should be a 404 | |
277 | with assert_http_error(404): |
|
277 | with assert_http_error(404): | |
278 | self.api.read('foo/q.txt') |
|
278 | self.api.read('foo/q.txt') | |
279 |
|
279 | |||
280 | # Specifying format=text should fail on a non-UTF-8 file |
|
280 | # Specifying format=text should fail on a non-UTF-8 file | |
281 | with assert_http_error(400): |
|
281 | with assert_http_error(400): | |
282 | self.api.read('foo/bar/baz.blob', type='file', format='text') |
|
282 | self.api.read('foo/bar/baz.blob', type='file', format='text') | |
283 |
|
283 | |||
284 | def test_get_binary_file_contents(self): |
|
284 | def test_get_binary_file_contents(self): | |
285 | for d, name in self.dirs_nbs: |
|
285 | for d, name in self.dirs_nbs: | |
286 | path = url_path_join(d, name + '.blob') |
|
286 | path = url_path_join(d, name + '.blob') | |
287 | model = self.api.read(path).json() |
|
287 | model = self.api.read(path).json() | |
288 | self.assertEqual(model['name'], u'%s.blob' % name) |
|
288 | self.assertEqual(model['name'], u'%s.blob' % name) | |
289 | self.assertEqual(model['path'], path) |
|
289 | self.assertEqual(model['path'], path) | |
290 | self.assertIn('content', model) |
|
290 | self.assertIn('content', model) | |
291 | self.assertEqual(model['format'], 'base64') |
|
291 | self.assertEqual(model['format'], 'base64') | |
292 | self.assertEqual(model['type'], 'file') |
|
292 | self.assertEqual(model['type'], 'file') | |
293 | self.assertEqual( |
|
293 | self.assertEqual( | |
294 | base64.decodestring(model['content'].encode('ascii')), |
|
294 | base64.decodestring(model['content'].encode('ascii')), | |
295 | self._blob_for_name(name), |
|
295 | self._blob_for_name(name), | |
296 | ) |
|
296 | ) | |
297 |
|
297 | |||
298 | # Name that doesn't exist - should be a 404 |
|
298 | # Name that doesn't exist - should be a 404 | |
299 | with assert_http_error(404): |
|
299 | with assert_http_error(404): | |
300 | self.api.read('foo/q.txt') |
|
300 | self.api.read('foo/q.txt') | |
301 |
|
301 | |||
302 | def test_get_bad_type(self): |
|
302 | def test_get_bad_type(self): | |
303 | with assert_http_error(400): |
|
303 | with assert_http_error(400): | |
304 | self.api.read(u'unicodΓ©', type='file') # this is a directory |
|
304 | self.api.read(u'unicodΓ©', type='file') # this is a directory | |
305 |
|
305 | |||
306 | with assert_http_error(400): |
|
306 | with assert_http_error(400): | |
307 | self.api.read(u'unicodΓ©/innonascii.ipynb', type='directory') |
|
307 | self.api.read(u'unicodΓ©/innonascii.ipynb', type='directory') | |
308 |
|
308 | |||
309 | def _check_created(self, resp, path, type='notebook'): |
|
309 | def _check_created(self, resp, path, type='notebook'): | |
310 | self.assertEqual(resp.status_code, 201) |
|
310 | self.assertEqual(resp.status_code, 201) | |
311 | location_header = py3compat.str_to_unicode(resp.headers['Location']) |
|
311 | location_header = py3compat.str_to_unicode(resp.headers['Location']) | |
312 | self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path))) |
|
312 | self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path))) | |
313 | rjson = resp.json() |
|
313 | rjson = resp.json() | |
314 | self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1]) |
|
314 | self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1]) | |
315 | self.assertEqual(rjson['path'], path) |
|
315 | self.assertEqual(rjson['path'], path) | |
316 | self.assertEqual(rjson['type'], type) |
|
316 | self.assertEqual(rjson['type'], type) | |
317 | isright = self.isdir if type == 'directory' else self.isfile |
|
317 | isright = self.isdir if type == 'directory' else self.isfile | |
318 | assert isright(path) |
|
318 | assert isright(path) | |
319 |
|
319 | |||
320 | def test_create_untitled(self): |
|
320 | def test_create_untitled(self): | |
321 | resp = self.api.create_untitled(path=u'Γ₯ b') |
|
321 | resp = self.api.create_untitled(path=u'Γ₯ b') | |
322 | self._check_created(resp, u'Γ₯ b/Untitled.ipynb') |
|
322 | self._check_created(resp, u'Γ₯ b/Untitled.ipynb') | |
323 |
|
323 | |||
324 | # Second time |
|
324 | # Second time | |
325 | resp = self.api.create_untitled(path=u'Γ₯ b') |
|
325 | resp = self.api.create_untitled(path=u'Γ₯ b') | |
326 | self._check_created(resp, u'Γ₯ b/Untitled1.ipynb') |
|
326 | self._check_created(resp, u'Γ₯ b/Untitled1.ipynb') | |
327 |
|
327 | |||
328 | # And two directories down |
|
328 | # And two directories down | |
329 | resp = self.api.create_untitled(path='foo/bar') |
|
329 | resp = self.api.create_untitled(path='foo/bar') | |
330 | self._check_created(resp, 'foo/bar/Untitled.ipynb') |
|
330 | self._check_created(resp, 'foo/bar/Untitled.ipynb') | |
331 |
|
331 | |||
332 | def test_create_untitled_txt(self): |
|
332 | def test_create_untitled_txt(self): | |
333 | resp = self.api.create_untitled(path='foo/bar', ext='.txt') |
|
333 | resp = self.api.create_untitled(path='foo/bar', ext='.txt') | |
334 | self._check_created(resp, 'foo/bar/untitled.txt', type='file') |
|
334 | self._check_created(resp, 'foo/bar/untitled.txt', type='file') | |
335 |
|
335 | |||
336 | resp = self.api.read(path='foo/bar/untitled.txt') |
|
336 | resp = self.api.read(path='foo/bar/untitled.txt') | |
337 | model = resp.json() |
|
337 | model = resp.json() | |
338 | self.assertEqual(model['type'], 'file') |
|
338 | self.assertEqual(model['type'], 'file') | |
339 | self.assertEqual(model['format'], 'text') |
|
339 | self.assertEqual(model['format'], 'text') | |
340 | self.assertEqual(model['content'], '') |
|
340 | self.assertEqual(model['content'], '') | |
341 |
|
341 | |||
342 | def test_upload(self): |
|
342 | def test_upload(self): | |
343 | nb = new_notebook() |
|
343 | nb = new_notebook() | |
344 | nbmodel = {'content': nb, 'type': 'notebook'} |
|
344 | nbmodel = {'content': nb, 'type': 'notebook'} | |
345 | path = u'Γ₯ b/Upload tΓ©st.ipynb' |
|
345 | path = u'Γ₯ b/Upload tΓ©st.ipynb' | |
346 | resp = self.api.upload(path, body=json.dumps(nbmodel)) |
|
346 | resp = self.api.upload(path, body=json.dumps(nbmodel)) | |
347 | self._check_created(resp, path) |
|
347 | self._check_created(resp, path) | |
348 |
|
348 | |||
349 | def test_mkdir_untitled(self): |
|
349 | def test_mkdir_untitled(self): | |
350 | resp = self.api.mkdir_untitled(path=u'Γ₯ b') |
|
350 | resp = self.api.mkdir_untitled(path=u'Γ₯ b') | |
351 | self._check_created(resp, u'Γ₯ b/Untitled Folder', type='directory') |
|
351 | self._check_created(resp, u'Γ₯ b/Untitled Folder', type='directory') | |
352 |
|
352 | |||
353 | # Second time |
|
353 | # Second time | |
354 | resp = self.api.mkdir_untitled(path=u'Γ₯ b') |
|
354 | resp = self.api.mkdir_untitled(path=u'Γ₯ b') | |
355 | self._check_created(resp, u'Γ₯ b/Untitled Folder 1', type='directory') |
|
355 | self._check_created(resp, u'Γ₯ b/Untitled Folder 1', type='directory') | |
356 |
|
356 | |||
357 | # And two directories down |
|
357 | # And two directories down | |
358 | resp = self.api.mkdir_untitled(path='foo/bar') |
|
358 | resp = self.api.mkdir_untitled(path='foo/bar') | |
359 | self._check_created(resp, 'foo/bar/Untitled Folder', type='directory') |
|
359 | self._check_created(resp, 'foo/bar/Untitled Folder', type='directory') | |
360 |
|
360 | |||
361 | def test_mkdir(self): |
|
361 | def test_mkdir(self): | |
362 | path = u'Γ₯ b/New βir' |
|
362 | path = u'Γ₯ b/New βir' | |
363 | resp = self.api.mkdir(path) |
|
363 | resp = self.api.mkdir(path) | |
364 | self._check_created(resp, path, type='directory') |
|
364 | self._check_created(resp, path, type='directory') | |
365 |
|
365 | |||
366 | def test_mkdir_hidden_400(self): |
|
366 | def test_mkdir_hidden_400(self): | |
367 | with assert_http_error(400): |
|
367 | with assert_http_error(400): | |
368 | resp = self.api.mkdir(u'Γ₯ b/.hidden') |
|
368 | resp = self.api.mkdir(u'Γ₯ b/.hidden') | |
369 |
|
369 | |||
370 | def test_upload_txt(self): |
|
370 | def test_upload_txt(self): | |
371 | body = u'ΓΌnicode tΓ©xt' |
|
371 | body = u'ΓΌnicode tΓ©xt' | |
372 | model = { |
|
372 | model = { | |
373 | 'content' : body, |
|
373 | 'content' : body, | |
374 | 'format' : 'text', |
|
374 | 'format' : 'text', | |
375 | 'type' : 'file', |
|
375 | 'type' : 'file', | |
376 | } |
|
376 | } | |
377 | path = u'Γ₯ b/Upload tΓ©st.txt' |
|
377 | path = u'Γ₯ b/Upload tΓ©st.txt' | |
378 | resp = self.api.upload(path, body=json.dumps(model)) |
|
378 | resp = self.api.upload(path, body=json.dumps(model)) | |
379 |
|
379 | |||
380 | # check roundtrip |
|
380 | # check roundtrip | |
381 | resp = self.api.read(path) |
|
381 | resp = self.api.read(path) | |
382 | model = resp.json() |
|
382 | model = resp.json() | |
383 | self.assertEqual(model['type'], 'file') |
|
383 | self.assertEqual(model['type'], 'file') | |
384 | self.assertEqual(model['format'], 'text') |
|
384 | self.assertEqual(model['format'], 'text') | |
385 | self.assertEqual(model['content'], body) |
|
385 | self.assertEqual(model['content'], body) | |
386 |
|
386 | |||
387 | def test_upload_b64(self): |
|
387 | def test_upload_b64(self): | |
388 | body = b'\xFFblob' |
|
388 | body = b'\xFFblob' | |
389 | b64body = base64.encodestring(body).decode('ascii') |
|
389 | b64body = base64.encodestring(body).decode('ascii') | |
390 | model = { |
|
390 | model = { | |
391 | 'content' : b64body, |
|
391 | 'content' : b64body, | |
392 | 'format' : 'base64', |
|
392 | 'format' : 'base64', | |
393 | 'type' : 'file', |
|
393 | 'type' : 'file', | |
394 | } |
|
394 | } | |
395 | path = u'Γ₯ b/Upload tΓ©st.blob' |
|
395 | path = u'Γ₯ b/Upload tΓ©st.blob' | |
396 | resp = self.api.upload(path, body=json.dumps(model)) |
|
396 | resp = self.api.upload(path, body=json.dumps(model)) | |
397 |
|
397 | |||
398 | # check roundtrip |
|
398 | # check roundtrip | |
399 | resp = self.api.read(path) |
|
399 | resp = self.api.read(path) | |
400 | model = resp.json() |
|
400 | model = resp.json() | |
401 | self.assertEqual(model['type'], 'file') |
|
401 | self.assertEqual(model['type'], 'file') | |
402 | self.assertEqual(model['path'], path) |
|
402 | self.assertEqual(model['path'], path) | |
403 | self.assertEqual(model['format'], 'base64') |
|
403 | self.assertEqual(model['format'], 'base64') | |
404 | decoded = base64.decodestring(model['content'].encode('ascii')) |
|
404 | decoded = base64.decodestring(model['content'].encode('ascii')) | |
405 | self.assertEqual(decoded, body) |
|
405 | self.assertEqual(decoded, body) | |
406 |
|
406 | |||
407 | def test_upload_v2(self): |
|
407 | def test_upload_v2(self): | |
408 | nb = v2.new_notebook() |
|
408 | nb = v2.new_notebook() | |
409 | ws = v2.new_worksheet() |
|
409 | ws = v2.new_worksheet() | |
410 | nb.worksheets.append(ws) |
|
410 | nb.worksheets.append(ws) | |
411 | ws.cells.append(v2.new_code_cell(input='print("hi")')) |
|
411 | ws.cells.append(v2.new_code_cell(input='print("hi")')) | |
412 | nbmodel = {'content': nb, 'type': 'notebook'} |
|
412 | nbmodel = {'content': nb, 'type': 'notebook'} | |
413 | path = u'Γ₯ b/Upload tΓ©st.ipynb' |
|
413 | path = u'Γ₯ b/Upload tΓ©st.ipynb' | |
414 | resp = self.api.upload(path, body=json.dumps(nbmodel)) |
|
414 | resp = self.api.upload(path, body=json.dumps(nbmodel)) | |
415 | self._check_created(resp, path) |
|
415 | self._check_created(resp, path) | |
416 | resp = self.api.read(path) |
|
416 | resp = self.api.read(path) | |
417 | data = resp.json() |
|
417 | data = resp.json() | |
418 | self.assertEqual(data['content']['nbformat'], 4) |
|
418 | self.assertEqual(data['content']['nbformat'], 4) | |
419 |
|
419 | |||
420 | def test_copy(self): |
|
420 | def test_copy(self): | |
421 | resp = self.api.copy(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b') |
|
421 | resp = self.api.copy(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b') | |
422 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy1.ipynb') |
|
422 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy1.ipynb') | |
423 |
|
423 | |||
424 | resp = self.api.copy(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b') |
|
424 | resp = self.api.copy(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b') | |
425 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy2.ipynb') |
|
425 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy2.ipynb') | |
426 |
|
426 | |||
427 | def test_copy_copy(self): |
|
427 | def test_copy_copy(self): | |
428 | resp = self.api.copy(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b') |
|
428 | resp = self.api.copy(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b') | |
429 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy1.ipynb') |
|
429 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy1.ipynb') | |
430 |
|
430 | |||
431 | resp = self.api.copy(u'Γ₯ b/Γ§ d-Copy1.ipynb', u'Γ₯ b') |
|
431 | resp = self.api.copy(u'Γ₯ b/Γ§ d-Copy1.ipynb', u'Γ₯ b') | |
432 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy2.ipynb') |
|
432 | self._check_created(resp, u'Γ₯ b/Γ§ d-Copy2.ipynb') | |
433 |
|
433 | |||
434 | def test_copy_path(self): |
|
434 | def test_copy_path(self): | |
435 | resp = self.api.copy(u'foo/a.ipynb', u'Γ₯ b') |
|
435 | resp = self.api.copy(u'foo/a.ipynb', u'Γ₯ b') | |
436 | self._check_created(resp, u'Γ₯ b/a.ipynb') |
|
436 | self._check_created(resp, u'Γ₯ b/a.ipynb') | |
437 |
|
437 | |||
438 | resp = self.api.copy(u'foo/a.ipynb', u'Γ₯ b') |
|
438 | resp = self.api.copy(u'foo/a.ipynb', u'Γ₯ b') | |
439 | self._check_created(resp, u'Γ₯ b/a-Copy1.ipynb') |
|
439 | self._check_created(resp, u'Γ₯ b/a-Copy1.ipynb') | |
440 |
|
440 | |||
441 | def test_copy_put_400(self): |
|
441 | def test_copy_put_400(self): | |
442 | with assert_http_error(400): |
|
442 | with assert_http_error(400): | |
443 | resp = self.api.copy_put(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b/cΓΈpy.ipynb') |
|
443 | resp = self.api.copy_put(u'Γ₯ b/Γ§ d.ipynb', u'Γ₯ b/cΓΈpy.ipynb') | |
444 |
|
444 | |||
445 | def test_copy_dir_400(self): |
|
445 | def test_copy_dir_400(self): | |
446 | # can't copy directories |
|
446 | # can't copy directories | |
447 | with assert_http_error(400): |
|
447 | with assert_http_error(400): | |
448 | resp = self.api.copy(u'Γ₯ b', u'foo') |
|
448 | resp = self.api.copy(u'Γ₯ b', u'foo') | |
449 |
|
449 | |||
450 | def test_delete(self): |
|
450 | def test_delete(self): | |
451 | for d, name in self.dirs_nbs: |
|
451 | for d, name in self.dirs_nbs: | |
452 | print('%r, %r' % (d, name)) |
|
452 | print('%r, %r' % (d, name)) | |
453 | resp = self.api.delete(url_path_join(d, name + '.ipynb')) |
|
453 | resp = self.api.delete(url_path_join(d, name + '.ipynb')) | |
454 | self.assertEqual(resp.status_code, 204) |
|
454 | self.assertEqual(resp.status_code, 204) | |
455 |
|
455 | |||
456 | for d in self.dirs + ['/']: |
|
456 | for d in self.dirs + ['/']: | |
457 | nbs = notebooks_only(self.api.list(d).json()) |
|
457 | nbs = notebooks_only(self.api.list(d).json()) | |
458 | print('------') |
|
458 | print('------') | |
459 | print(d) |
|
459 | print(d) | |
460 | print(nbs) |
|
460 | print(nbs) | |
461 | self.assertEqual(nbs, []) |
|
461 | self.assertEqual(nbs, []) | |
462 |
|
462 | |||
463 | def test_delete_dirs(self): |
|
463 | def test_delete_dirs(self): | |
464 | # depth-first delete everything, so we don't try to delete empty directories |
|
464 | # depth-first delete everything, so we don't try to delete empty directories | |
465 | for name in sorted(self.dirs + ['/'], key=len, reverse=True): |
|
465 | for name in sorted(self.dirs + ['/'], key=len, reverse=True): | |
466 | listing = self.api.list(name).json()['content'] |
|
466 | listing = self.api.list(name).json()['content'] | |
467 | for model in listing: |
|
467 | for model in listing: | |
468 | self.api.delete(model['path']) |
|
468 | self.api.delete(model['path']) | |
469 | listing = self.api.list('/').json()['content'] |
|
469 | listing = self.api.list('/').json()['content'] | |
470 | self.assertEqual(listing, []) |
|
470 | self.assertEqual(listing, []) | |
471 |
|
471 | |||
472 | def test_delete_non_empty_dir(self): |
|
472 | def test_delete_non_empty_dir(self): | |
473 | """delete non-empty dir raises 400""" |
|
473 | """delete non-empty dir raises 400""" | |
474 | with assert_http_error(400): |
|
474 | with assert_http_error(400): | |
475 | self.api.delete(u'Γ₯ b') |
|
475 | self.api.delete(u'Γ₯ b') | |
476 |
|
476 | |||
477 | def test_rename(self): |
|
477 | def test_rename(self): | |
478 | resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb') |
|
478 | resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb') | |
479 | self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb') |
|
479 | self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb') | |
480 | self.assertEqual(resp.json()['name'], 'z.ipynb') |
|
480 | self.assertEqual(resp.json()['name'], 'z.ipynb') | |
481 | self.assertEqual(resp.json()['path'], 'foo/z.ipynb') |
|
481 | self.assertEqual(resp.json()['path'], 'foo/z.ipynb') | |
482 | assert self.isfile('foo/z.ipynb') |
|
482 | assert self.isfile('foo/z.ipynb') | |
483 |
|
483 | |||
484 | nbs = notebooks_only(self.api.list('foo').json()) |
|
484 | nbs = notebooks_only(self.api.list('foo').json()) | |
485 | nbnames = set(n['name'] for n in nbs) |
|
485 | nbnames = set(n['name'] for n in nbs) | |
486 | self.assertIn('z.ipynb', nbnames) |
|
486 | self.assertIn('z.ipynb', nbnames) | |
487 | self.assertNotIn('a.ipynb', nbnames) |
|
487 | self.assertNotIn('a.ipynb', nbnames) | |
488 |
|
488 | |||
489 | def test_rename_existing(self): |
|
489 | def test_rename_existing(self): | |
490 | with assert_http_error(409): |
|
490 | with assert_http_error(409): | |
491 | self.api.rename('foo/a.ipynb', 'foo/b.ipynb') |
|
491 | self.api.rename('foo/a.ipynb', 'foo/b.ipynb') | |
492 |
|
492 | |||
493 | def test_save(self): |
|
493 | def test_save(self): | |
494 | resp = self.api.read('foo/a.ipynb') |
|
494 | resp = self.api.read('foo/a.ipynb') | |
495 | nbcontent = json.loads(resp.text)['content'] |
|
495 | nbcontent = json.loads(resp.text)['content'] | |
496 | nb = from_dict(nbcontent) |
|
496 | nb = from_dict(nbcontent) | |
497 | nb.cells.append(new_markdown_cell(u'Created by test Β³')) |
|
497 | nb.cells.append(new_markdown_cell(u'Created by test Β³')) | |
498 |
|
498 | |||
499 | nbmodel= {'content': nb, 'type': 'notebook'} |
|
499 | nbmodel= {'content': nb, 'type': 'notebook'} | |
500 | resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel)) |
|
500 | resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel)) | |
501 |
|
501 | |||
502 | nbcontent = self.api.read('foo/a.ipynb').json()['content'] |
|
502 | nbcontent = self.api.read('foo/a.ipynb').json()['content'] | |
503 | newnb = from_dict(nbcontent) |
|
503 | newnb = from_dict(nbcontent) | |
504 | self.assertEqual(newnb.cells[0].source, |
|
504 | self.assertEqual(newnb.cells[0].source, | |
505 | u'Created by test Β³') |
|
505 | u'Created by test Β³') | |
506 |
|
506 | |||
    def test_checkpoints(self):
        """Full checkpoint lifecycle for a notebook: create, list, restore, delete.

        The steps are strictly order-dependent: the checkpoint is taken while
        the notebook is empty, a cell is then added and saved, and restoring
        the checkpoint must bring back the empty state.
        """
        resp = self.api.read('foo/a.ipynb')
        # Create a checkpoint; a 201 with an {'id', 'last_modified'} body and
        # a Location header ending in the checkpoint id is expected.
        r = self.api.new_checkpoint('foo/a.ipynb')
        self.assertEqual(r.status_code, 201)
        cp1 = r.json()
        self.assertEqual(set(cp1), {'id', 'last_modified'})
        self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

        # Modify it
        nbcontent = json.loads(resp.text)['content']
        nb = from_dict(nbcontent)
        hcell = new_markdown_cell('Created by test')
        nb.cells.append(hcell)
        # Save
        nbmodel= {'content': nb, 'type': 'notebook'}
        resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel))

        # List checkpoints: saving must not have created a second one.
        cps = self.api.get_checkpoints('foo/a.ipynb').json()
        self.assertEqual(cps, [cp1])

        # The saved (modified) notebook is what a read now returns.
        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        nb = from_dict(nbcontent)
        self.assertEqual(nb.cells[0].source, 'Created by test')

        # Restore cp1: the notebook goes back to its pre-modification (empty) state.
        r = self.api.restore_checkpoint('foo/a.ipynb', cp1['id'])
        self.assertEqual(r.status_code, 204)
        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        nb = from_dict(nbcontent)
        self.assertEqual(nb.cells, [])

        # Delete cp1: the checkpoint list becomes empty.
        r = self.api.delete_checkpoint('foo/a.ipynb', cp1['id'])
        self.assertEqual(r.status_code, 204)
        cps = self.api.get_checkpoints('foo/a.ipynb').json()
        self.assertEqual(cps, [])
545 | def test_file_checkpoints(self): |
|
545 | def test_file_checkpoints(self): | |
546 | """ |
|
546 | """ | |
547 | Test checkpointing of non-notebook files. |
|
547 | Test checkpointing of non-notebook files. | |
548 | """ |
|
548 | """ | |
549 | filename = 'foo/a.txt' |
|
549 | filename = 'foo/a.txt' | |
550 | resp = self.api.read(filename) |
|
550 | resp = self.api.read(filename) | |
551 | orig_content = json.loads(resp.text)['content'] |
|
551 | orig_content = json.loads(resp.text)['content'] | |
552 |
|
552 | |||
553 | # Create a checkpoint. |
|
553 | # Create a checkpoint. | |
554 | r = self.api.new_checkpoint(filename) |
|
554 | r = self.api.new_checkpoint(filename) | |
555 | self.assertEqual(r.status_code, 201) |
|
555 | self.assertEqual(r.status_code, 201) | |
556 | cp1 = r.json() |
|
556 | cp1 = r.json() | |
557 | self.assertEqual(set(cp1), {'id', 'last_modified'}) |
|
557 | self.assertEqual(set(cp1), {'id', 'last_modified'}) | |
558 | self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id']) |
|
558 | self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id']) | |
559 |
|
559 | |||
560 | # Modify the file and save. |
|
560 | # Modify the file and save. | |
561 | new_content = orig_content + '\nsecond line' |
|
561 | new_content = orig_content + '\nsecond line' | |
562 | model = { |
|
562 | model = { | |
563 | 'content': new_content, |
|
563 | 'content': new_content, | |
564 | 'type': 'file', |
|
564 | 'type': 'file', | |
565 | 'format': 'text', |
|
565 | 'format': 'text', | |
566 | } |
|
566 | } | |
567 | resp = self.api.save(filename, body=json.dumps(model)) |
|
567 | resp = self.api.save(filename, body=json.dumps(model)) | |
568 |
|
568 | |||
569 | # List checkpoints |
|
569 | # List checkpoints | |
570 | cps = self.api.get_checkpoints(filename).json() |
|
570 | cps = self.api.get_checkpoints(filename).json() | |
571 | self.assertEqual(cps, [cp1]) |
|
571 | self.assertEqual(cps, [cp1]) | |
572 |
|
572 | |||
573 | content = self.api.read(filename).json()['content'] |
|
573 | content = self.api.read(filename).json()['content'] | |
574 | self.assertEqual(content, new_content) |
|
574 | self.assertEqual(content, new_content) | |
575 |
|
575 | |||
576 | # Restore cp1 |
|
576 | # Restore cp1 | |
577 | r = self.api.restore_checkpoint(filename, cp1['id']) |
|
577 | r = self.api.restore_checkpoint(filename, cp1['id']) | |
578 | self.assertEqual(r.status_code, 204) |
|
578 | self.assertEqual(r.status_code, 204) | |
579 | restored_content = self.api.read(filename).json()['content'] |
|
579 | restored_content = self.api.read(filename).json()['content'] | |
580 | self.assertEqual(restored_content, orig_content) |
|
580 | self.assertEqual(restored_content, orig_content) | |
581 |
|
581 | |||
582 | # Delete cp1 |
|
582 | # Delete cp1 | |
583 | r = self.api.delete_checkpoint(filename, cp1['id']) |
|
583 | r = self.api.delete_checkpoint(filename, cp1['id']) | |
584 | self.assertEqual(r.status_code, 204) |
|
584 | self.assertEqual(r.status_code, 204) | |
585 | cps = self.api.get_checkpoints(filename).json() |
|
585 | cps = self.api.get_checkpoints(filename).json() | |
586 | self.assertEqual(cps, []) |
|
586 | self.assertEqual(cps, []) | |
587 |
|
587 | |||
588 | @contextmanager |
|
588 | @contextmanager | |
589 | def patch_cp_root(self, dirname): |
|
589 | def patch_cp_root(self, dirname): | |
590 | """ |
|
590 | """ | |
591 | Temporarily patch the root dir of our checkpoint manager. |
|
591 | Temporarily patch the root dir of our checkpoint manager. | |
592 | """ |
|
592 | """ | |
593 | cpm = self.notebook.contents_manager.checkpoint_manager |
|
593 | cpm = self.notebook.contents_manager.checkpoint_manager | |
594 | old_dirname = cpm.root_dir |
|
594 | old_dirname = cpm.root_dir | |
595 | cpm.root_dir = dirname |
|
595 | cpm.root_dir = dirname | |
596 | try: |
|
596 | try: | |
597 | yield |
|
597 | yield | |
598 | finally: |
|
598 | finally: | |
599 | cpm.root_dir = old_dirname |
|
599 | cpm.root_dir = old_dirname | |
600 |
|
600 | |||
601 | def test_checkpoints_separate_root(self): |
|
601 | def test_checkpoints_separate_root(self): | |
602 | """ |
|
602 | """ | |
603 | Test that FileCheckpointManager functions correctly even when it's |
|
603 | Test that FileCheckpointManager functions correctly even when it's | |
604 | using a different root dir from FileContentsManager. This also keeps |
|
604 | using a different root dir from FileContentsManager. This also keeps | |
605 | the implementation honest for use with ContentsManagers that don't map |
|
605 | the implementation honest for use with ContentsManagers that don't map | |
606 | models to the filesystem |
|
606 | models to the filesystem | |
607 |
|
607 | |||
608 | Override this method to a no-op when testing other managers. |
|
608 | Override this method to a no-op when testing other managers. | |
609 | """ |
|
609 | """ | |
610 | with TemporaryDirectory() as td: |
|
610 | with TemporaryDirectory() as td: | |
611 | with self.patch_cp_root(td): |
|
611 | with self.patch_cp_root(td): | |
612 | self.test_checkpoints() |
|
612 | self.test_checkpoints() | |
613 |
|
613 | |||
614 | with TemporaryDirectory() as td: |
|
614 | with TemporaryDirectory() as td: | |
615 | with self.patch_cp_root(td): |
|
615 | with self.patch_cp_root(td): | |
616 | self.test_file_checkpoints() |
|
616 | self.test_file_checkpoints() | |
|
617 | ||||
|
618 | @contextmanager | |||
|
619 | def patch_cm_backend(self): | |||
|
620 | """ | |||
|
621 | Temporarily patch our ContentsManager to present a different backend. | |||
|
622 | """ | |||
|
623 | mgr = self.notebook.contents_manager | |||
|
624 | old_backend = mgr.backend | |||
|
625 | mgr.backend = "" | |||
|
626 | try: | |||
|
627 | yield | |||
|
628 | finally: | |||
|
629 | mgr.backend = old_backend | |||
|
630 | ||||
|
631 | def test_checkpoints_empty_backend(self): | |||
|
632 | with self.patch_cm_backend(): | |||
|
633 | self.test_checkpoints() | |||
|
634 | ||||
|
635 | with self.patch_cm_backend(): | |||
|
636 | self.test_file_checkpoints() | |||
|
637 | ||||
|
638 |
General Comments 0
You need to be logged in to leave comments.
Login now