Show More
@@ -1,576 +1,536 b'' | |||||
1 | """A base class for contents managers.""" |
|
1 | """A base class for contents managers.""" | |
2 |
|
2 | |||
3 | # Copyright (c) IPython Development Team. |
|
3 | # Copyright (c) IPython Development Team. | |
4 | # Distributed under the terms of the Modified BSD License. |
|
4 | # Distributed under the terms of the Modified BSD License. | |
5 |
|
5 | |||
6 | from fnmatch import fnmatch |
|
6 | from fnmatch import fnmatch | |
7 | import itertools |
|
7 | import itertools | |
8 | import json |
|
8 | import json | |
9 | import os |
|
9 | import os | |
10 | import re |
|
10 | import re | |
11 |
|
11 | |||
12 | from tornado.web import HTTPError |
|
12 | from tornado.web import HTTPError | |
13 |
|
13 | |||
14 | from IPython import nbformat |
|
14 | from IPython import nbformat | |
15 | from IPython.config.configurable import LoggingConfigurable |
|
15 | from IPython.config.configurable import LoggingConfigurable | |
16 | from IPython.nbformat import sign, validate, ValidationError |
|
16 | from IPython.nbformat import sign, validate, ValidationError | |
17 | from IPython.nbformat.v4 import new_notebook |
|
17 | from IPython.nbformat.v4 import new_notebook | |
18 | from IPython.utils.importstring import import_item |
|
18 | from IPython.utils.importstring import import_item | |
19 | from IPython.utils.traitlets import ( |
|
19 | from IPython.utils.traitlets import ( | |
20 | Any, |
|
20 | Any, | |
21 | Dict, |
|
21 | Dict, | |
22 | Instance, |
|
22 | Instance, | |
23 | List, |
|
23 | List, | |
24 | TraitError, |
|
24 | TraitError, | |
25 | Type, |
|
25 | Type, | |
26 | Unicode, |
|
26 | Unicode, | |
27 | ) |
|
27 | ) | |
28 | from IPython.utils.py3compat import string_types |
|
28 | from IPython.utils.py3compat import string_types | |
29 |
|
29 | |||
# Matches a "-Copy" or "-CopyN" marker immediately preceding a file
# extension, e.g. "nb-Copy2.ipynb"; used by copy() to strip old markers.
copy_pat = re.compile(r'\-Copy\d*\.')
31 |
|
31 | |||
32 |
|
32 | |||
33 | def _separate_dirs_files(models): |
|
|||
34 | """ |
|
|||
35 | Split an iterable of models into a list of file paths and a list of |
|
|||
36 | directory paths. |
|
|||
37 | """ |
|
|||
38 | dirs = [] |
|
|||
39 | files = [] |
|
|||
40 | for model in models: |
|
|||
41 | if model['type'] == 'directory': |
|
|||
42 | dirs.append(model['path']) |
|
|||
43 | else: |
|
|||
44 | files.append(model['path']) |
|
|||
45 | return dirs, files |
|
|||
46 |
|
||||
47 |
|
||||
class CheckpointManager(LoggingConfigurable):
    """
    Abstract base class that manages checkpoints on behalf of a
    ContentsManager.  Subclasses supply the storage mechanism.
    """
    def create_file_checkpoint(self, content, format, path):
        """Snapshot the current state of a file.

        Returns a checkpoint model describing the new checkpoint.
        """
        raise NotImplementedError("must be implemented in a subclass")

    def create_notebook_checkpoint(self, nb, path):
        """Snapshot the current state of a notebook.

        Returns a checkpoint model describing the new checkpoint.
        """
        raise NotImplementedError("must be implemented in a subclass")

    def get_checkpoint(self, checkpoint_id, path, type):
        """Retrieve the content of a checkpoint.

        Returns an unvalidated model shaped like the return value of
        ContentsManager.get.
        """
        raise NotImplementedError("must be implemented in a subclass")

    def rename_checkpoint(self, checkpoint_id, old_path, new_path):
        """Move a single checkpoint from old_path to new_path."""
        raise NotImplementedError("must be implemented in a subclass")

    def delete_checkpoint(self, checkpoint_id, path):
        """Remove one checkpoint belonging to a file."""
        raise NotImplementedError("must be implemented in a subclass")

    def list_checkpoints(self, path):
        """Enumerate the checkpoints recorded for a given file."""
        raise NotImplementedError("must be implemented in a subclass")

    def rename_all_checkpoints(self, old_path, new_path):
        """Re-point every checkpoint of old_path at new_path."""
        for checkpoint in self.list_checkpoints(old_path):
            self.rename_checkpoint(checkpoint['id'], old_path, new_path)

    def delete_all_checkpoints(self, path):
        """Discard every checkpoint associated with path."""
        for checkpoint in self.list_checkpoints(path):
            self.delete_checkpoint(checkpoint['id'], path)
95 |
|
80 | |||
96 |
|
81 | |||
97 | class ContentsManager(LoggingConfigurable): |
|
82 | class ContentsManager(LoggingConfigurable): | |
98 | """Base class for serving files and directories. |
|
83 | """Base class for serving files and directories. | |
99 |
|
84 | |||
100 | This serves any text or binary file, |
|
85 | This serves any text or binary file, | |
101 | as well as directories, |
|
86 | as well as directories, | |
102 | with special handling for JSON notebook documents. |
|
87 | with special handling for JSON notebook documents. | |
103 |
|
88 | |||
104 | Most APIs take a path argument, |
|
89 | Most APIs take a path argument, | |
105 | which is always an API-style unicode path, |
|
90 | which is always an API-style unicode path, | |
106 | and always refers to a directory. |
|
91 | and always refers to a directory. | |
107 |
|
92 | |||
108 | - unicode, not url-escaped |
|
93 | - unicode, not url-escaped | |
109 | - '/'-separated |
|
94 | - '/'-separated | |
110 | - leading and trailing '/' will be stripped |
|
95 | - leading and trailing '/' will be stripped | |
111 | - if unspecified, path defaults to '', |
|
96 | - if unspecified, path defaults to '', | |
112 | indicating the root path. |
|
97 | indicating the root path. | |
113 |
|
98 | |||
114 | """ |
|
99 | """ | |
115 |
|
100 | |||
    # Notary used to sign notebooks and check their trust status.
    notary = Instance(sign.NotebookNotary)
    def _notary_default(self):
        # Lazily build a notary parented to this manager (shares config/log).
        return sign.NotebookNotary(parent=self)
119 |
|
104 | |||
    # Filename glob patterns excluded from directory listings
    # (bytecode caches, native libraries, editor backups, etc.).
    hide_globs = List(Unicode, [
            u'__pycache__', '*.pyc', '*.pyo',
            '.DS_Store', '*.so', '*.dylib', '*~',
        ], config=True, help="""
        Glob patterns to hide in file and directory listings.
    """)
126 |
|
111 | |||
    # Base stem for new untitled notebooks ("Untitled.ipynb", "Untitled1.ipynb", ...).
    untitled_notebook = Unicode("Untitled", config=True,
        help="The base name used when creating untitled notebooks."
    )

    # Base stem for new untitled plain files.
    untitled_file = Unicode("untitled", config=True,
        help="The base name used when creating untitled files."
    )

    # Base stem for new untitled directories.
    untitled_directory = Unicode("Untitled Folder", config=True,
        help="The base name used when creating untitled directories."
    )
138 |
|
123 | |||
    # Optional user-supplied callable (or dotted import string) run on each
    # model just before it is written; see run_pre_save_hook below.
    pre_save_hook = Any(None, config=True,
        help="""Python callable or importstring thereof

        To be called on a contents model prior to save.

        This can be used to process the structure,
        such as removing notebook outputs or other side effects that
        should not be saved.

        It will be called as (all arguments passed by keyword):

            hook(path=path, model=model, contents_manager=self)

        model: the model to be saved. Includes file contents.
            modifying this dict will affect the file that is stored.
        path: the API path of the save destination
        contents_manager: this ContentsManager instance
        """
    )
    def _pre_save_hook_changed(self, name, old, new):
        # Trait observer: resolve an import string to the callable it names;
        # any other non-None value must itself be callable.
        if new and isinstance(new, string_types):
            self.pre_save_hook = import_item(self.pre_save_hook)
        elif new:
            if not callable(new):
                raise TraitError("pre_save_hook must be callable")
164 |
|
149 | |||
165 | def run_pre_save_hook(self, model, path, **kwargs): |
|
150 | def run_pre_save_hook(self, model, path, **kwargs): | |
166 | """Run the pre-save hook if defined, and log errors""" |
|
151 | """Run the pre-save hook if defined, and log errors""" | |
167 | if self.pre_save_hook: |
|
152 | if self.pre_save_hook: | |
168 | try: |
|
153 | try: | |
169 | self.log.debug("Running pre-save hook on %s", path) |
|
154 | self.log.debug("Running pre-save hook on %s", path) | |
170 | self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs) |
|
155 | self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs) | |
171 | except Exception: |
|
156 | except Exception: | |
172 | self.log.error("Pre-save hook failed on %s", path, exc_info=True) |
|
157 | self.log.error("Pre-save hook failed on %s", path, exc_info=True) | |
173 |
|
158 | |||
    # Class used to instantiate the checkpoint manager (configurable).
    checkpoint_manager_class = Type(CheckpointManager, config=True)
    # The live checkpoint-manager instance, built lazily from the class above.
    checkpoint_manager = Instance(CheckpointManager, config=True)
    # Keyword arguments forwarded to checkpoint_manager_class on construction.
    checkpoint_manager_kwargs = Dict(allow_none=False, config=True)

    def _checkpoint_manager_default(self):
        # Instantiate the configured class with the configured kwargs.
        return self.checkpoint_manager_class(**self.checkpoint_manager_kwargs)

    def _checkpoint_manager_kwargs_default(self):
        # Default kwargs: share this manager's config tree and logger.
        return dict(
            parent=self,
            log=self.log,
        )
186 |
|
171 | |||
187 | # ContentsManager API part 1: methods that must be |
|
172 | # ContentsManager API part 1: methods that must be | |
188 | # implemented in subclasses. |
|
173 | # implemented in subclasses. | |
189 |
|
174 | |||
    def dir_exists(self, path):
        """Does the API-style path (directory) actually exist?

        Like os.path.isdir

        Override this method in subclasses.

        Parameters
        ----------
        path : string
            The path to check

        Returns
        -------
        exists : bool
            Whether the path does indeed exist.
        """
        # Abstract: concrete managers (e.g. FileContentsManager) must override.
        raise NotImplementedError
208 |
|
193 | |||
    def is_hidden(self, path):
        """Does the API style path correspond to a hidden directory or file?

        Parameters
        ----------
        path : string
            The path to check. This is an API path (`/` separated,
            relative to root dir).

        Returns
        -------
        hidden : bool
            Whether the path is hidden.

        """
        # Abstract: hiddenness is storage-specific (dotfiles, attributes, ...).
        raise NotImplementedError
225 |
|
210 | |||
    def file_exists(self, path=''):
        """Does a file exist at the given path?

        Like os.path.isfile

        Override this method in subclasses.

        Parameters
        ----------
        path : string
            The API path of the file to check (with '/' as separator,
            relative to root dir).

        Returns
        -------
        exists : bool
            Whether the file exists.
        """
        # Abstract: concrete managers must override.
        raise NotImplementedError('must be implemented in a subclass')
246 |
|
231 | |||
247 | def exists(self, path): |
|
232 | def exists(self, path): | |
248 | """Does a file or directory exist at the given path? |
|
233 | """Does a file or directory exist at the given path? | |
249 |
|
234 | |||
250 | Like os.path.exists |
|
235 | Like os.path.exists | |
251 |
|
236 | |||
252 | Parameters |
|
237 | Parameters | |
253 | ---------- |
|
238 | ---------- | |
254 | path : string |
|
239 | path : string | |
255 | The relative path to the file's directory (with '/' as separator) |
|
240 | The relative path to the file's directory (with '/' as separator) | |
256 |
|
241 | |||
257 | Returns |
|
242 | Returns | |
258 | ------- |
|
243 | ------- | |
259 | exists : bool |
|
244 | exists : bool | |
260 | Whether the target exists. |
|
245 | Whether the target exists. | |
261 | """ |
|
246 | """ | |
262 | return self.file_exists(path) or self.dir_exists(path) |
|
247 | return self.file_exists(path) or self.dir_exists(path) | |
263 |
|
248 | |||
    def get(self, path, content=True, type=None, format=None):
        """Get the model of a file or directory with or without content."""
        # Abstract: the core read operation; concrete managers must override.
        raise NotImplementedError('must be implemented in a subclass')
267 |
|
252 | |||
    def save(self, model, path):
        """Save the file or directory and return the model with no content.

        Save implementations should call self.run_pre_save_hook(model=model, path=path)
        prior to writing any data.
        """
        # Abstract: the core write operation; concrete managers must override.
        raise NotImplementedError('must be implemented in a subclass')
275 |
|
260 | |||
    def delete_file(self, path):
        """Delete file or directory by path."""
        # Abstract: removal only; checkpoint cleanup happens in delete().
        raise NotImplementedError('must be implemented in a subclass')
279 |
|
264 | |||
    def rename_file(self, old_path, new_path):
        """Rename a file."""
        # Abstract: rename only; checkpoint renaming happens in rename().
        raise NotImplementedError('must be implemented in a subclass')
283 |
|
268 | |||
284 | # ContentsManager API part 2: methods that have useable default |
|
269 | # ContentsManager API part 2: methods that have useable default | |
285 | # implementations, but can be overridden in subclasses. |
|
270 | # implementations, but can be overridden in subclasses. | |
286 |
|
271 | |||
    def delete(self, path):
        """Delete a file/directory and any associated checkpoints."""
        # Remove the entity first; only then discard its checkpoint history.
        self.delete_file(path)
        self.checkpoint_manager.delete_all_checkpoints(path)
291 |
|
276 | |||
    def rename(self, old_path, new_path):
        """Rename a file and any checkpoints associated with that file."""
        # Move the file first so checkpoints never point at a missing target.
        self.rename_file(old_path, new_path)
        self.checkpoint_manager.rename_all_checkpoints(old_path, new_path)
296 |
|
281 | |||
297 | def update(self, model, path): |
|
282 | def update(self, model, path): | |
298 | """Update the file's path |
|
283 | """Update the file's path | |
299 |
|
284 | |||
300 | For use in PATCH requests, to enable renaming a file without |
|
285 | For use in PATCH requests, to enable renaming a file without | |
301 | re-uploading its contents. Only used for renaming at the moment. |
|
286 | re-uploading its contents. Only used for renaming at the moment. | |
302 | """ |
|
287 | """ | |
303 | path = path.strip('/') |
|
288 | path = path.strip('/') | |
304 | new_path = model.get('path', path).strip('/') |
|
289 | new_path = model.get('path', path).strip('/') | |
305 | if path != new_path: |
|
290 | if path != new_path: | |
306 | self.rename(path, new_path) |
|
291 | self.rename(path, new_path) | |
307 | model = self.get(new_path, content=False) |
|
292 | model = self.get(new_path, content=False) | |
308 | return model |
|
293 | return model | |
309 |
|
294 | |||
    def info_string(self):
        # One-line description logged at server startup (see log_info).
        return "Serving contents"
312 |
|
297 | |||
    def get_kernel_path(self, path, model=None):
        """Return the API path for the kernel

        KernelManagers can turn this value into a filesystem path,
        or ignore it altogether.

        The default value here will start kernels in the directory of the
        notebook server. FileContentsManager overrides this to use the
        directory containing the notebook.
        """
        # '' means the server's root directory.
        return ''
324 |
|
309 | |||
325 | def increment_filename(self, filename, path='', insert=''): |
|
310 | def increment_filename(self, filename, path='', insert=''): | |
326 | """Increment a filename until it is unique. |
|
311 | """Increment a filename until it is unique. | |
327 |
|
312 | |||
328 | Parameters |
|
313 | Parameters | |
329 | ---------- |
|
314 | ---------- | |
330 | filename : unicode |
|
315 | filename : unicode | |
331 | The name of a file, including extension |
|
316 | The name of a file, including extension | |
332 | path : unicode |
|
317 | path : unicode | |
333 | The API path of the target's directory |
|
318 | The API path of the target's directory | |
334 |
|
319 | |||
335 | Returns |
|
320 | Returns | |
336 | ------- |
|
321 | ------- | |
337 | name : unicode |
|
322 | name : unicode | |
338 | A filename that is unique, based on the input filename. |
|
323 | A filename that is unique, based on the input filename. | |
339 | """ |
|
324 | """ | |
340 | path = path.strip('/') |
|
325 | path = path.strip('/') | |
341 | basename, ext = os.path.splitext(filename) |
|
326 | basename, ext = os.path.splitext(filename) | |
342 | for i in itertools.count(): |
|
327 | for i in itertools.count(): | |
343 | if i: |
|
328 | if i: | |
344 | insert_i = '{}{}'.format(insert, i) |
|
329 | insert_i = '{}{}'.format(insert, i) | |
345 | else: |
|
330 | else: | |
346 | insert_i = '' |
|
331 | insert_i = '' | |
347 | name = u'{basename}{insert}{ext}'.format(basename=basename, |
|
332 | name = u'{basename}{insert}{ext}'.format(basename=basename, | |
348 | insert=insert_i, ext=ext) |
|
333 | insert=insert_i, ext=ext) | |
349 | if not self.exists(u'{}/{}'.format(path, name)): |
|
334 | if not self.exists(u'{}/{}'.format(path, name)): | |
350 | break |
|
335 | break | |
351 | return name |
|
336 | return name | |
352 |
|
337 | |||
353 | def validate_notebook_model(self, model): |
|
338 | def validate_notebook_model(self, model): | |
354 | """Add failed-validation message to model""" |
|
339 | """Add failed-validation message to model""" | |
355 | try: |
|
340 | try: | |
356 | validate(model['content']) |
|
341 | validate(model['content']) | |
357 | except ValidationError as e: |
|
342 | except ValidationError as e: | |
358 | model['message'] = u'Notebook Validation failed: {}:\n{}'.format( |
|
343 | model['message'] = u'Notebook Validation failed: {}:\n{}'.format( | |
359 | e.message, json.dumps(e.instance, indent=1, default=lambda obj: '<UNKNOWN>'), |
|
344 | e.message, json.dumps(e.instance, indent=1, default=lambda obj: '<UNKNOWN>'), | |
360 | ) |
|
345 | ) | |
361 | return model |
|
346 | return model | |
362 |
|
347 | |||
363 | def new_untitled(self, path='', type='', ext=''): |
|
348 | def new_untitled(self, path='', type='', ext=''): | |
364 | """Create a new untitled file or directory in path |
|
349 | """Create a new untitled file or directory in path | |
365 |
|
350 | |||
366 | path must be a directory |
|
351 | path must be a directory | |
367 |
|
352 | |||
368 | File extension can be specified. |
|
353 | File extension can be specified. | |
369 |
|
354 | |||
370 | Use `new` to create files with a fully specified path (including filename). |
|
355 | Use `new` to create files with a fully specified path (including filename). | |
371 | """ |
|
356 | """ | |
372 | path = path.strip('/') |
|
357 | path = path.strip('/') | |
373 | if not self.dir_exists(path): |
|
358 | if not self.dir_exists(path): | |
374 | raise HTTPError(404, 'No such directory: %s' % path) |
|
359 | raise HTTPError(404, 'No such directory: %s' % path) | |
375 |
|
360 | |||
376 | model = {} |
|
361 | model = {} | |
377 | if type: |
|
362 | if type: | |
378 | model['type'] = type |
|
363 | model['type'] = type | |
379 |
|
364 | |||
380 | if ext == '.ipynb': |
|
365 | if ext == '.ipynb': | |
381 | model.setdefault('type', 'notebook') |
|
366 | model.setdefault('type', 'notebook') | |
382 | else: |
|
367 | else: | |
383 | model.setdefault('type', 'file') |
|
368 | model.setdefault('type', 'file') | |
384 |
|
369 | |||
385 | insert = '' |
|
370 | insert = '' | |
386 | if model['type'] == 'directory': |
|
371 | if model['type'] == 'directory': | |
387 | untitled = self.untitled_directory |
|
372 | untitled = self.untitled_directory | |
388 | insert = ' ' |
|
373 | insert = ' ' | |
389 | elif model['type'] == 'notebook': |
|
374 | elif model['type'] == 'notebook': | |
390 | untitled = self.untitled_notebook |
|
375 | untitled = self.untitled_notebook | |
391 | ext = '.ipynb' |
|
376 | ext = '.ipynb' | |
392 | elif model['type'] == 'file': |
|
377 | elif model['type'] == 'file': | |
393 | untitled = self.untitled_file |
|
378 | untitled = self.untitled_file | |
394 | else: |
|
379 | else: | |
395 | raise HTTPError(400, "Unexpected model type: %r" % model['type']) |
|
380 | raise HTTPError(400, "Unexpected model type: %r" % model['type']) | |
396 |
|
381 | |||
397 | name = self.increment_filename(untitled + ext, path, insert=insert) |
|
382 | name = self.increment_filename(untitled + ext, path, insert=insert) | |
398 | path = u'{0}/{1}'.format(path, name) |
|
383 | path = u'{0}/{1}'.format(path, name) | |
399 | return self.new(model, path) |
|
384 | return self.new(model, path) | |
400 |
|
385 | |||
401 | def new(self, model=None, path=''): |
|
386 | def new(self, model=None, path=''): | |
402 | """Create a new file or directory and return its model with no content. |
|
387 | """Create a new file or directory and return its model with no content. | |
403 |
|
388 | |||
404 | To create a new untitled entity in a directory, use `new_untitled`. |
|
389 | To create a new untitled entity in a directory, use `new_untitled`. | |
405 | """ |
|
390 | """ | |
406 | path = path.strip('/') |
|
391 | path = path.strip('/') | |
407 | if model is None: |
|
392 | if model is None: | |
408 | model = {} |
|
393 | model = {} | |
409 |
|
394 | |||
410 | if path.endswith('.ipynb'): |
|
395 | if path.endswith('.ipynb'): | |
411 | model.setdefault('type', 'notebook') |
|
396 | model.setdefault('type', 'notebook') | |
412 | else: |
|
397 | else: | |
413 | model.setdefault('type', 'file') |
|
398 | model.setdefault('type', 'file') | |
414 |
|
399 | |||
415 | # no content, not a directory, so fill out new-file model |
|
400 | # no content, not a directory, so fill out new-file model | |
416 | if 'content' not in model and model['type'] != 'directory': |
|
401 | if 'content' not in model and model['type'] != 'directory': | |
417 | if model['type'] == 'notebook': |
|
402 | if model['type'] == 'notebook': | |
418 | model['content'] = new_notebook() |
|
403 | model['content'] = new_notebook() | |
419 | model['format'] = 'json' |
|
404 | model['format'] = 'json' | |
420 | else: |
|
405 | else: | |
421 | model['content'] = '' |
|
406 | model['content'] = '' | |
422 | model['type'] = 'file' |
|
407 | model['type'] = 'file' | |
423 | model['format'] = 'text' |
|
408 | model['format'] = 'text' | |
424 |
|
409 | |||
425 | model = self.save(model, path) |
|
410 | model = self.save(model, path) | |
426 | return model |
|
411 | return model | |
427 |
|
412 | |||
428 | def copy(self, from_path, to_path=None): |
|
413 | def copy(self, from_path, to_path=None): | |
429 | """Copy an existing file and return its new model. |
|
414 | """Copy an existing file and return its new model. | |
430 |
|
415 | |||
431 | If to_path not specified, it will be the parent directory of from_path. |
|
416 | If to_path not specified, it will be the parent directory of from_path. | |
432 | If to_path is a directory, filename will increment `from_path-Copy#.ext`. |
|
417 | If to_path is a directory, filename will increment `from_path-Copy#.ext`. | |
433 |
|
418 | |||
434 | from_path must be a full path to a file. |
|
419 | from_path must be a full path to a file. | |
435 | """ |
|
420 | """ | |
436 | path = from_path.strip('/') |
|
421 | path = from_path.strip('/') | |
437 | if to_path is not None: |
|
422 | if to_path is not None: | |
438 | to_path = to_path.strip('/') |
|
423 | to_path = to_path.strip('/') | |
439 |
|
424 | |||
440 | if '/' in path: |
|
425 | if '/' in path: | |
441 | from_dir, from_name = path.rsplit('/', 1) |
|
426 | from_dir, from_name = path.rsplit('/', 1) | |
442 | else: |
|
427 | else: | |
443 | from_dir = '' |
|
428 | from_dir = '' | |
444 | from_name = path |
|
429 | from_name = path | |
445 |
|
430 | |||
446 | model = self.get(path) |
|
431 | model = self.get(path) | |
447 | model.pop('path', None) |
|
432 | model.pop('path', None) | |
448 | model.pop('name', None) |
|
433 | model.pop('name', None) | |
449 | if model['type'] == 'directory': |
|
434 | if model['type'] == 'directory': | |
450 | raise HTTPError(400, "Can't copy directories") |
|
435 | raise HTTPError(400, "Can't copy directories") | |
451 |
|
436 | |||
452 | if to_path is None: |
|
437 | if to_path is None: | |
453 | to_path = from_dir |
|
438 | to_path = from_dir | |
454 | if self.dir_exists(to_path): |
|
439 | if self.dir_exists(to_path): | |
455 | name = copy_pat.sub(u'.', from_name) |
|
440 | name = copy_pat.sub(u'.', from_name) | |
456 | to_name = self.increment_filename(name, to_path, insert='-Copy') |
|
441 | to_name = self.increment_filename(name, to_path, insert='-Copy') | |
457 | to_path = u'{0}/{1}'.format(to_path, to_name) |
|
442 | to_path = u'{0}/{1}'.format(to_path, to_name) | |
458 |
|
443 | |||
459 | model = self.save(model, to_path) |
|
444 | model = self.save(model, to_path) | |
460 | return model |
|
445 | return model | |
461 |
|
446 | |||
462 | def log_info(self): |
|
447 | def log_info(self): | |
463 | self.log.info(self.info_string()) |
|
448 | self.log.info(self.info_string()) | |
464 |
|
449 | |||
465 | def trust_notebook(self, path): |
|
450 | def trust_notebook(self, path): | |
466 | """Explicitly trust a notebook |
|
451 | """Explicitly trust a notebook | |
467 |
|
452 | |||
468 | Parameters |
|
453 | Parameters | |
469 | ---------- |
|
454 | ---------- | |
470 | path : string |
|
455 | path : string | |
471 | The path of a notebook |
|
456 | The path of a notebook | |
472 | """ |
|
457 | """ | |
473 | model = self.get(path) |
|
458 | model = self.get(path) | |
474 | nb = model['content'] |
|
459 | nb = model['content'] | |
475 | self.log.warn("Trusting notebook %s", path) |
|
460 | self.log.warn("Trusting notebook %s", path) | |
476 | self.notary.mark_cells(nb, True) |
|
461 | self.notary.mark_cells(nb, True) | |
477 | self.save(model, path) |
|
462 | self.save(model, path) | |
478 |
|
463 | |||
479 | def check_and_sign(self, nb, path=''): |
|
464 | def check_and_sign(self, nb, path=''): | |
480 | """Check for trusted cells, and sign the notebook. |
|
465 | """Check for trusted cells, and sign the notebook. | |
481 |
|
466 | |||
482 | Called as a part of saving notebooks. |
|
467 | Called as a part of saving notebooks. | |
483 |
|
468 | |||
484 | Parameters |
|
469 | Parameters | |
485 | ---------- |
|
470 | ---------- | |
486 | nb : dict |
|
471 | nb : dict | |
487 | The notebook dict |
|
472 | The notebook dict | |
488 | path : string |
|
473 | path : string | |
489 | The notebook's path (for logging) |
|
474 | The notebook's path (for logging) | |
490 | """ |
|
475 | """ | |
491 | if self.notary.check_cells(nb): |
|
476 | if self.notary.check_cells(nb): | |
492 | self.notary.sign(nb) |
|
477 | self.notary.sign(nb) | |
493 | else: |
|
478 | else: | |
494 | self.log.warn("Saving untrusted notebook %s", path) |
|
479 | self.log.warn("Saving untrusted notebook %s", path) | |
495 |
|
480 | |||
496 | def mark_trusted_cells(self, nb, path=''): |
|
481 | def mark_trusted_cells(self, nb, path=''): | |
497 | """Mark cells as trusted if the notebook signature matches. |
|
482 | """Mark cells as trusted if the notebook signature matches. | |
498 |
|
483 | |||
499 | Called as a part of loading notebooks. |
|
484 | Called as a part of loading notebooks. | |
500 |
|
485 | |||
501 | Parameters |
|
486 | Parameters | |
502 | ---------- |
|
487 | ---------- | |
503 | nb : dict |
|
488 | nb : dict | |
504 | The notebook object (in current nbformat) |
|
489 | The notebook object (in current nbformat) | |
505 | path : string |
|
490 | path : string | |
506 | The notebook's path (for logging) |
|
491 | The notebook's path (for logging) | |
507 | """ |
|
492 | """ | |
508 | trusted = self.notary.check_signature(nb) |
|
493 | trusted = self.notary.check_signature(nb) | |
509 | if not trusted: |
|
494 | if not trusted: | |
510 | self.log.warn("Notebook %s is not trusted", path) |
|
495 | self.log.warn("Notebook %s is not trusted", path) | |
511 | self.notary.mark_cells(nb, trusted) |
|
496 | self.notary.mark_cells(nb, trusted) | |
512 |
|
497 | |||
513 | def should_list(self, name): |
|
498 | def should_list(self, name): | |
514 | """Should this file/directory name be displayed in a listing?""" |
|
499 | """Should this file/directory name be displayed in a listing?""" | |
515 | return not any(fnmatch(name, glob) for glob in self.hide_globs) |
|
500 | return not any(fnmatch(name, glob) for glob in self.hide_globs) | |
516 |
|
501 | |||
517 | def walk(self): |
|
|||
518 | """ |
|
|||
519 | Like os.walk, but written in terms of the ContentsAPI. |
|
|||
520 |
|
||||
521 | Returns a generator of tuples of the form: |
|
|||
522 | (directory name, [subdirectories], [files in directory]) |
|
|||
523 | """ |
|
|||
524 | return self._walk(['']) |
|
|||
525 |
|
||||
526 | def _walk(self, dirs): |
|
|||
527 | """ |
|
|||
528 | Recursive helper for walk. |
|
|||
529 | """ |
|
|||
530 | for directory in dirs: |
|
|||
531 | children = self.get( |
|
|||
532 | directory, |
|
|||
533 | content=True, |
|
|||
534 | type='directory', |
|
|||
535 | )['content'] |
|
|||
536 | dirs, files = map(sorted, _separate_dirs_files(children)) |
|
|||
537 | yield (directory, dirs, files) |
|
|||
538 | if dirs: |
|
|||
539 | for entry in self._walk(dirs): |
|
|||
540 | yield(entry) |
|
|||
541 |
|
||||
542 | # Part 3: Checkpoints API |
|
502 | # Part 3: Checkpoints API | |
543 | def create_checkpoint(self, path): |
|
503 | def create_checkpoint(self, path): | |
544 | """Create a checkpoint.""" |
|
504 | """Create a checkpoint.""" | |
545 | model = self.get(path, content=True) |
|
505 | model = self.get(path, content=True) | |
546 | type = model['type'] |
|
506 | type = model['type'] | |
547 | if type == 'notebook': |
|
507 | if type == 'notebook': | |
548 | return self.checkpoint_manager.create_notebook_checkpoint( |
|
508 | return self.checkpoint_manager.create_notebook_checkpoint( | |
549 | model['content'], |
|
509 | model['content'], | |
550 | path, |
|
510 | path, | |
551 | ) |
|
511 | ) | |
552 | elif type == 'file': |
|
512 | elif type == 'file': | |
553 | return self.checkpoint_manager.create_file_checkpoint( |
|
513 | return self.checkpoint_manager.create_file_checkpoint( | |
554 | model['content'], |
|
514 | model['content'], | |
555 | model['format'], |
|
515 | model['format'], | |
556 | path, |
|
516 | path, | |
557 | ) |
|
517 | ) | |
558 |
|
518 | |||
559 | def list_checkpoints(self, path): |
|
519 | def list_checkpoints(self, path): | |
560 | return self.checkpoint_manager.list_checkpoints(path) |
|
520 | return self.checkpoint_manager.list_checkpoints(path) | |
561 |
|
521 | |||
562 | def restore_checkpoint(self, checkpoint_id, path): |
|
522 | def restore_checkpoint(self, checkpoint_id, path): | |
563 | """ |
|
523 | """ | |
564 | Restore a checkpoint. |
|
524 | Restore a checkpoint. | |
565 | """ |
|
525 | """ | |
566 | return self.save( |
|
526 | return self.save( | |
567 | model=self.checkpoint_manager.get_checkpoint( |
|
527 | model=self.checkpoint_manager.get_checkpoint( | |
568 | checkpoint_id, |
|
528 | checkpoint_id, | |
569 | path, |
|
529 | path, | |
570 | self.get(path, content=False)['type'] |
|
530 | self.get(path, content=False)['type'] | |
571 | ), |
|
531 | ), | |
572 | path=path, |
|
532 | path=path, | |
573 | ) |
|
533 | ) | |
574 |
|
534 | |||
575 | def delete_checkpoint(self, checkpoint_id, path): |
|
535 | def delete_checkpoint(self, checkpoint_id, path): | |
576 | return self.checkpoint_manager.delete_checkpoint(checkpoint_id, path) |
|
536 | return self.checkpoint_manager.delete_checkpoint(checkpoint_id, path) |
@@ -1,695 +1,616 b'' | |||||
1 | # coding: utf-8 |
|
1 | # coding: utf-8 | |
2 | """Test the contents webservice API.""" |
|
2 | """Test the contents webservice API.""" | |
3 |
|
3 | |||
4 | import base64 |
|
4 | import base64 | |
5 | from contextlib import contextmanager |
|
5 | from contextlib import contextmanager | |
6 | import io |
|
6 | import io | |
7 | import json |
|
7 | import json | |
8 | import os |
|
8 | import os | |
9 | import shutil |
|
9 | import shutil | |
10 | from unicodedata import normalize |
|
10 | from unicodedata import normalize | |
11 |
|
11 | |||
12 | pjoin = os.path.join |
|
12 | pjoin = os.path.join | |
13 |
|
13 | |||
14 | import requests |
|
14 | import requests | |
15 |
|
15 | |||
16 | from IPython.html.utils import url_path_join, url_escape, to_os_path |
|
16 | from IPython.html.utils import url_path_join, url_escape, to_os_path | |
17 | from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error |
|
17 | from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error | |
18 | from IPython.nbformat import read, write, from_dict |
|
18 | from IPython.nbformat import read, write, from_dict | |
19 | from IPython.nbformat.v4 import ( |
|
19 | from IPython.nbformat.v4 import ( | |
20 | new_notebook, new_markdown_cell, |
|
20 | new_notebook, new_markdown_cell, | |
21 | ) |
|
21 | ) | |
22 | from IPython.nbformat import v2 |
|
22 | from IPython.nbformat import v2 | |
23 | from IPython.utils import py3compat |
|
23 | from IPython.utils import py3compat | |
24 | from IPython.utils.data import uniq_stable |
|
24 | from IPython.utils.data import uniq_stable | |
25 | from IPython.utils.tempdir import TemporaryDirectory |
|
25 | from IPython.utils.tempdir import TemporaryDirectory | |
26 |
|
26 | |||
27 |
|
27 | |||
28 | def notebooks_only(dir_model): |
|
28 | def notebooks_only(dir_model): | |
29 | return [nb for nb in dir_model['content'] if nb['type']=='notebook'] |
|
29 | return [nb for nb in dir_model['content'] if nb['type']=='notebook'] | |
30 |
|
30 | |||
31 | def dirs_only(dir_model): |
|
31 | def dirs_only(dir_model): | |
32 | return [x for x in dir_model['content'] if x['type']=='directory'] |
|
32 | return [x for x in dir_model['content'] if x['type']=='directory'] | |
33 |
|
33 | |||
34 |
|
34 | |||
35 | class API(object): |
|
35 | class API(object): | |
36 | """Wrapper for contents API calls.""" |
|
36 | """Wrapper for contents API calls.""" | |
37 | def __init__(self, base_url): |
|
37 | def __init__(self, base_url): | |
38 | self.base_url = base_url |
|
38 | self.base_url = base_url | |
39 |
|
39 | |||
40 | def _req(self, verb, path, body=None, params=None): |
|
40 | def _req(self, verb, path, body=None, params=None): | |
41 | response = requests.request(verb, |
|
41 | response = requests.request(verb, | |
42 | url_path_join(self.base_url, 'api/contents', path), |
|
42 | url_path_join(self.base_url, 'api/contents', path), | |
43 | data=body, params=params, |
|
43 | data=body, params=params, | |
44 | ) |
|
44 | ) | |
45 | response.raise_for_status() |
|
45 | response.raise_for_status() | |
46 | return response |
|
46 | return response | |
47 |
|
47 | |||
48 | def list(self, path='/'): |
|
48 | def list(self, path='/'): | |
49 | return self._req('GET', path) |
|
49 | return self._req('GET', path) | |
50 |
|
50 | |||
51 | def read(self, path, type=None, format=None): |
|
51 | def read(self, path, type=None, format=None): | |
52 | params = {} |
|
52 | params = {} | |
53 | if type is not None: |
|
53 | if type is not None: | |
54 | params['type'] = type |
|
54 | params['type'] = type | |
55 | if format is not None: |
|
55 | if format is not None: | |
56 | params['format'] = format |
|
56 | params['format'] = format | |
57 | return self._req('GET', path, params=params) |
|
57 | return self._req('GET', path, params=params) | |
58 |
|
58 | |||
59 | def create_untitled(self, path='/', ext='.ipynb'): |
|
59 | def create_untitled(self, path='/', ext='.ipynb'): | |
60 | body = None |
|
60 | body = None | |
61 | if ext: |
|
61 | if ext: | |
62 | body = json.dumps({'ext': ext}) |
|
62 | body = json.dumps({'ext': ext}) | |
63 | return self._req('POST', path, body) |
|
63 | return self._req('POST', path, body) | |
64 |
|
64 | |||
65 | def mkdir_untitled(self, path='/'): |
|
65 | def mkdir_untitled(self, path='/'): | |
66 | return self._req('POST', path, json.dumps({'type': 'directory'})) |
|
66 | return self._req('POST', path, json.dumps({'type': 'directory'})) | |
67 |
|
67 | |||
68 | def copy(self, copy_from, path='/'): |
|
68 | def copy(self, copy_from, path='/'): | |
69 | body = json.dumps({'copy_from':copy_from}) |
|
69 | body = json.dumps({'copy_from':copy_from}) | |
70 | return self._req('POST', path, body) |
|
70 | return self._req('POST', path, body) | |
71 |
|
71 | |||
72 | def create(self, path='/'): |
|
72 | def create(self, path='/'): | |
73 | return self._req('PUT', path) |
|
73 | return self._req('PUT', path) | |
74 |
|
74 | |||
75 | def upload(self, path, body): |
|
75 | def upload(self, path, body): | |
76 | return self._req('PUT', path, body) |
|
76 | return self._req('PUT', path, body) | |
77 |
|
77 | |||
78 | def mkdir(self, path='/'): |
|
78 | def mkdir(self, path='/'): | |
79 | return self._req('PUT', path, json.dumps({'type': 'directory'})) |
|
79 | return self._req('PUT', path, json.dumps({'type': 'directory'})) | |
80 |
|
80 | |||
81 | def copy_put(self, copy_from, path='/'): |
|
81 | def copy_put(self, copy_from, path='/'): | |
82 | body = json.dumps({'copy_from':copy_from}) |
|
82 | body = json.dumps({'copy_from':copy_from}) | |
83 | return self._req('PUT', path, body) |
|
83 | return self._req('PUT', path, body) | |
84 |
|
84 | |||
85 | def save(self, path, body): |
|
85 | def save(self, path, body): | |
86 | return self._req('PUT', path, body) |
|
86 | return self._req('PUT', path, body) | |
87 |
|
87 | |||
88 | def delete(self, path='/'): |
|
88 | def delete(self, path='/'): | |
89 | return self._req('DELETE', path) |
|
89 | return self._req('DELETE', path) | |
90 |
|
90 | |||
91 | def rename(self, path, new_path): |
|
91 | def rename(self, path, new_path): | |
92 | body = json.dumps({'path': new_path}) |
|
92 | body = json.dumps({'path': new_path}) | |
93 | return self._req('PATCH', path, body) |
|
93 | return self._req('PATCH', path, body) | |
94 |
|
94 | |||
95 | def get_checkpoints(self, path): |
|
95 | def get_checkpoints(self, path): | |
96 | return self._req('GET', url_path_join(path, 'checkpoints')) |
|
96 | return self._req('GET', url_path_join(path, 'checkpoints')) | |
97 |
|
97 | |||
98 | def new_checkpoint(self, path): |
|
98 | def new_checkpoint(self, path): | |
99 | return self._req('POST', url_path_join(path, 'checkpoints')) |
|
99 | return self._req('POST', url_path_join(path, 'checkpoints')) | |
100 |
|
100 | |||
101 | def restore_checkpoint(self, path, checkpoint_id): |
|
101 | def restore_checkpoint(self, path, checkpoint_id): | |
102 | return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id)) |
|
102 | return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id)) | |
103 |
|
103 | |||
104 | def delete_checkpoint(self, path, checkpoint_id): |
|
104 | def delete_checkpoint(self, path, checkpoint_id): | |
105 | return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id)) |
|
105 | return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id)) | |
106 |
|
106 | |||
107 | class APITest(NotebookTestBase): |
|
107 | class APITest(NotebookTestBase): | |
108 | """Test the kernels web service API""" |
|
108 | """Test the kernels web service API""" | |
109 | dirs_nbs = [('', 'inroot'), |
|
109 | dirs_nbs = [('', 'inroot'), | |
110 | ('Directory with spaces in', 'inspace'), |
|
110 | ('Directory with spaces in', 'inspace'), | |
111 | (u'unicodé', 'innonascii'), |
|
111 | (u'unicodé', 'innonascii'), | |
112 | ('foo', 'a'), |
|
112 | ('foo', 'a'), | |
113 | ('foo', 'b'), |
|
113 | ('foo', 'b'), | |
114 | ('foo', 'name with spaces'), |
|
114 | ('foo', 'name with spaces'), | |
115 | ('foo', u'unicodé'), |
|
115 | ('foo', u'unicodé'), | |
116 | ('foo/bar', 'baz'), |
|
116 | ('foo/bar', 'baz'), | |
117 | ('ordering', 'A'), |
|
117 | ('ordering', 'A'), | |
118 | ('ordering', 'b'), |
|
118 | ('ordering', 'b'), | |
119 | ('ordering', 'C'), |
|
119 | ('ordering', 'C'), | |
120 | (u'å b', u'ç d'), |
|
120 | (u'å b', u'ç d'), | |
121 | ] |
|
121 | ] | |
122 | hidden_dirs = ['.hidden', '__pycache__'] |
|
122 | hidden_dirs = ['.hidden', '__pycache__'] | |
123 |
|
123 | |||
124 | # Don't include root dir. |
|
124 | # Don't include root dir. | |
125 | dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]]) |
|
125 | dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]]) | |
126 | top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs} |
|
126 | top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs} | |
127 |
|
127 | |||
128 | @staticmethod |
|
128 | @staticmethod | |
129 | def _blob_for_name(name): |
|
129 | def _blob_for_name(name): | |
130 | return name.encode('utf-8') + b'\xFF' |
|
130 | return name.encode('utf-8') + b'\xFF' | |
131 |
|
131 | |||
132 | @staticmethod |
|
132 | @staticmethod | |
133 | def _txt_for_name(name): |
|
133 | def _txt_for_name(name): | |
134 | return u'%s text file' % name |
|
134 | return u'%s text file' % name | |
135 |
|
135 | |||
136 | def to_os_path(self, api_path): |
|
136 | def to_os_path(self, api_path): | |
137 | return to_os_path(api_path, root=self.notebook_dir.name) |
|
137 | return to_os_path(api_path, root=self.notebook_dir.name) | |
138 |
|
138 | |||
139 | def make_dir(self, api_path): |
|
139 | def make_dir(self, api_path): | |
140 | """Create a directory at api_path""" |
|
140 | """Create a directory at api_path""" | |
141 | os_path = self.to_os_path(api_path) |
|
141 | os_path = self.to_os_path(api_path) | |
142 | try: |
|
142 | try: | |
143 | os.makedirs(os_path) |
|
143 | os.makedirs(os_path) | |
144 | except OSError: |
|
144 | except OSError: | |
145 | print("Directory already exists: %r" % os_path) |
|
145 | print("Directory already exists: %r" % os_path) | |
146 |
|
146 | |||
147 | def make_txt(self, api_path, txt): |
|
147 | def make_txt(self, api_path, txt): | |
148 | """Make a text file at a given api_path""" |
|
148 | """Make a text file at a given api_path""" | |
149 | os_path = self.to_os_path(api_path) |
|
149 | os_path = self.to_os_path(api_path) | |
150 | with io.open(os_path, 'w', encoding='utf-8') as f: |
|
150 | with io.open(os_path, 'w', encoding='utf-8') as f: | |
151 | f.write(txt) |
|
151 | f.write(txt) | |
152 |
|
152 | |||
153 | def make_blob(self, api_path, blob): |
|
153 | def make_blob(self, api_path, blob): | |
154 | """Make a binary file at a given api_path""" |
|
154 | """Make a binary file at a given api_path""" | |
155 | os_path = self.to_os_path(api_path) |
|
155 | os_path = self.to_os_path(api_path) | |
156 | with io.open(os_path, 'wb') as f: |
|
156 | with io.open(os_path, 'wb') as f: | |
157 | f.write(blob) |
|
157 | f.write(blob) | |
158 |
|
158 | |||
159 | def make_nb(self, api_path, nb): |
|
159 | def make_nb(self, api_path, nb): | |
160 | """Make a notebook file at a given api_path""" |
|
160 | """Make a notebook file at a given api_path""" | |
161 | os_path = self.to_os_path(api_path) |
|
161 | os_path = self.to_os_path(api_path) | |
162 |
|
162 | |||
163 | with io.open(os_path, 'w', encoding='utf-8') as f: |
|
163 | with io.open(os_path, 'w', encoding='utf-8') as f: | |
164 | write(nb, f, version=4) |
|
164 | write(nb, f, version=4) | |
165 |
|
165 | |||
166 | def delete_dir(self, api_path): |
|
166 | def delete_dir(self, api_path): | |
167 | """Delete a directory at api_path, removing any contents.""" |
|
167 | """Delete a directory at api_path, removing any contents.""" | |
168 | os_path = self.to_os_path(api_path) |
|
168 | os_path = self.to_os_path(api_path) | |
169 | shutil.rmtree(os_path, ignore_errors=True) |
|
169 | shutil.rmtree(os_path, ignore_errors=True) | |
170 |
|
170 | |||
171 | def delete_file(self, api_path): |
|
171 | def delete_file(self, api_path): | |
172 | """Delete a file at the given path if it exists.""" |
|
172 | """Delete a file at the given path if it exists.""" | |
173 | if self.isfile(api_path): |
|
173 | if self.isfile(api_path): | |
174 | os.unlink(self.to_os_path(api_path)) |
|
174 | os.unlink(self.to_os_path(api_path)) | |
175 |
|
175 | |||
176 | def isfile(self, api_path): |
|
176 | def isfile(self, api_path): | |
177 | return os.path.isfile(self.to_os_path(api_path)) |
|
177 | return os.path.isfile(self.to_os_path(api_path)) | |
178 |
|
178 | |||
179 | def isdir(self, api_path): |
|
179 | def isdir(self, api_path): | |
180 | return os.path.isdir(self.to_os_path(api_path)) |
|
180 | return os.path.isdir(self.to_os_path(api_path)) | |
181 |
|
181 | |||
182 | def setUp(self): |
|
182 | def setUp(self): | |
183 |
|
183 | |||
184 | for d in (self.dirs + self.hidden_dirs): |
|
184 | for d in (self.dirs + self.hidden_dirs): | |
185 | self.make_dir(d) |
|
185 | self.make_dir(d) | |
186 |
|
186 | |||
187 | for d, name in self.dirs_nbs: |
|
187 | for d, name in self.dirs_nbs: | |
188 | # create a notebook |
|
188 | # create a notebook | |
189 | nb = new_notebook() |
|
189 | nb = new_notebook() | |
190 | self.make_nb(u'{}/{}.ipynb'.format(d, name), nb) |
|
190 | self.make_nb(u'{}/{}.ipynb'.format(d, name), nb) | |
191 |
|
191 | |||
192 | # create a text file |
|
192 | # create a text file | |
193 | txt = self._txt_for_name(name) |
|
193 | txt = self._txt_for_name(name) | |
194 | self.make_txt(u'{}/{}.txt'.format(d, name), txt) |
|
194 | self.make_txt(u'{}/{}.txt'.format(d, name), txt) | |
195 |
|
195 | |||
196 | # create a binary file |
|
196 | # create a binary file | |
197 | blob = self._blob_for_name(name) |
|
197 | blob = self._blob_for_name(name) | |
198 | self.make_blob(u'{}/{}.blob'.format(d, name), blob) |
|
198 | self.make_blob(u'{}/{}.blob'.format(d, name), blob) | |
199 |
|
199 | |||
200 | self.api = API(self.base_url()) |
|
200 | self.api = API(self.base_url()) | |
201 |
|
201 | |||
202 | def tearDown(self): |
|
202 | def tearDown(self): | |
203 | for dname in (list(self.top_level_dirs) + self.hidden_dirs): |
|
203 | for dname in (list(self.top_level_dirs) + self.hidden_dirs): | |
204 | self.delete_dir(dname) |
|
204 | self.delete_dir(dname) | |
205 | self.delete_file('inroot.ipynb') |
|
205 | self.delete_file('inroot.ipynb') | |
206 |
|
206 | |||
207 | def test_list_notebooks(self): |
|
207 | def test_list_notebooks(self): | |
208 | nbs = notebooks_only(self.api.list().json()) |
|
208 | nbs = notebooks_only(self.api.list().json()) | |
209 | self.assertEqual(len(nbs), 1) |
|
209 | self.assertEqual(len(nbs), 1) | |
210 | self.assertEqual(nbs[0]['name'], 'inroot.ipynb') |
|
210 | self.assertEqual(nbs[0]['name'], 'inroot.ipynb') | |
211 |
|
211 | |||
212 | nbs = notebooks_only(self.api.list('/Directory with spaces in/').json()) |
|
212 | nbs = notebooks_only(self.api.list('/Directory with spaces in/').json()) | |
213 | self.assertEqual(len(nbs), 1) |
|
213 | self.assertEqual(len(nbs), 1) | |
214 | self.assertEqual(nbs[0]['name'], 'inspace.ipynb') |
|
214 | self.assertEqual(nbs[0]['name'], 'inspace.ipynb') | |
215 |
|
215 | |||
216 | nbs = notebooks_only(self.api.list(u'/unicodé/').json()) |
|
216 | nbs = notebooks_only(self.api.list(u'/unicodé/').json()) | |
217 | self.assertEqual(len(nbs), 1) |
|
217 | self.assertEqual(len(nbs), 1) | |
218 | self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') |
|
218 | self.assertEqual(nbs[0]['name'], 'innonascii.ipynb') | |
219 | self.assertEqual(nbs[0]['path'], u'unicodé/innonascii.ipynb') |
|
219 | self.assertEqual(nbs[0]['path'], u'unicodé/innonascii.ipynb') | |
220 |
|
220 | |||
221 | nbs = notebooks_only(self.api.list('/foo/bar/').json()) |
|
221 | nbs = notebooks_only(self.api.list('/foo/bar/').json()) | |
222 | self.assertEqual(len(nbs), 1) |
|
222 | self.assertEqual(len(nbs), 1) | |
223 | self.assertEqual(nbs[0]['name'], 'baz.ipynb') |
|
223 | self.assertEqual(nbs[0]['name'], 'baz.ipynb') | |
224 | self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb') |
|
224 | self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb') | |
225 |
|
225 | |||
226 | nbs = notebooks_only(self.api.list('foo').json()) |
|
226 | nbs = notebooks_only(self.api.list('foo').json()) | |
227 | self.assertEqual(len(nbs), 4) |
|
227 | self.assertEqual(len(nbs), 4) | |
228 | nbnames = { normalize('NFC', n['name']) for n in nbs } |
|
228 | nbnames = { normalize('NFC', n['name']) for n in nbs } | |
229 | expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb'] |
|
229 | expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodé.ipynb'] | |
230 | expected = { normalize('NFC', name) for name in expected } |
|
230 | expected = { normalize('NFC', name) for name in expected } | |
231 | self.assertEqual(nbnames, expected) |
|
231 | self.assertEqual(nbnames, expected) | |
232 |
|
232 | |||
233 | nbs = notebooks_only(self.api.list('ordering').json()) |
|
233 | nbs = notebooks_only(self.api.list('ordering').json()) | |
234 | nbnames = [n['name'] for n in nbs] |
|
234 | nbnames = [n['name'] for n in nbs] | |
235 | expected = ['A.ipynb', 'b.ipynb', 'C.ipynb'] |
|
235 | expected = ['A.ipynb', 'b.ipynb', 'C.ipynb'] | |
236 | self.assertEqual(nbnames, expected) |
|
236 | self.assertEqual(nbnames, expected) | |
237 |
|
237 | |||
238 | def test_list_dirs(self): |
|
238 | def test_list_dirs(self): | |
239 | dirs = dirs_only(self.api.list().json()) |
|
239 | dirs = dirs_only(self.api.list().json()) | |
240 | dir_names = {normalize('NFC', d['name']) for d in dirs} |
|
240 | dir_names = {normalize('NFC', d['name']) for d in dirs} | |
241 | self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs |
|
241 | self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs | |
242 |
|
242 | |||
243 | def test_list_nonexistant_dir(self): |
|
243 | def test_list_nonexistant_dir(self): | |
244 | with assert_http_error(404): |
|
244 | with assert_http_error(404): | |
245 | self.api.list('nonexistant') |
|
245 | self.api.list('nonexistant') | |
246 |
|
246 | |||
247 | def test_get_nb_contents(self): |
|
247 | def test_get_nb_contents(self): | |
248 | for d, name in self.dirs_nbs: |
|
248 | for d, name in self.dirs_nbs: | |
249 | path = url_path_join(d, name + '.ipynb') |
|
249 | path = url_path_join(d, name + '.ipynb') | |
250 | nb = self.api.read(path).json() |
|
250 | nb = self.api.read(path).json() | |
251 | self.assertEqual(nb['name'], u'%s.ipynb' % name) |
|
251 | self.assertEqual(nb['name'], u'%s.ipynb' % name) | |
252 | self.assertEqual(nb['path'], path) |
|
252 | self.assertEqual(nb['path'], path) | |
253 | self.assertEqual(nb['type'], 'notebook') |
|
253 | self.assertEqual(nb['type'], 'notebook') | |
254 | self.assertIn('content', nb) |
|
254 | self.assertIn('content', nb) | |
255 | self.assertEqual(nb['format'], 'json') |
|
255 | self.assertEqual(nb['format'], 'json') | |
256 | self.assertIn('content', nb) |
|
256 | self.assertIn('content', nb) | |
257 | self.assertIn('metadata', nb['content']) |
|
257 | self.assertIn('metadata', nb['content']) | |
258 | self.assertIsInstance(nb['content']['metadata'], dict) |
|
258 | self.assertIsInstance(nb['content']['metadata'], dict) | |
259 |
|
259 | |||
260 | def test_get_contents_no_such_file(self): |
|
260 | def test_get_contents_no_such_file(self): | |
261 | # Name that doesn't exist - should be a 404 |
|
261 | # Name that doesn't exist - should be a 404 | |
262 | with assert_http_error(404): |
|
262 | with assert_http_error(404): | |
263 | self.api.read('foo/q.ipynb') |
|
263 | self.api.read('foo/q.ipynb') | |
264 |
|
264 | |||
265 | def test_get_text_file_contents(self): |
|
265 | def test_get_text_file_contents(self): | |
266 | for d, name in self.dirs_nbs: |
|
266 | for d, name in self.dirs_nbs: | |
267 | path = url_path_join(d, name + '.txt') |
|
267 | path = url_path_join(d, name + '.txt') | |
268 | model = self.api.read(path).json() |
|
268 | model = self.api.read(path).json() | |
269 | self.assertEqual(model['name'], u'%s.txt' % name) |
|
269 | self.assertEqual(model['name'], u'%s.txt' % name) | |
270 | self.assertEqual(model['path'], path) |
|
270 | self.assertEqual(model['path'], path) | |
271 | self.assertIn('content', model) |
|
271 | self.assertIn('content', model) | |
272 | self.assertEqual(model['format'], 'text') |
|
272 | self.assertEqual(model['format'], 'text') | |
273 | self.assertEqual(model['type'], 'file') |
|
273 | self.assertEqual(model['type'], 'file') | |
274 | self.assertEqual(model['content'], self._txt_for_name(name)) |
|
274 | self.assertEqual(model['content'], self._txt_for_name(name)) | |
275 |
|
275 | |||
276 | # Name that doesn't exist - should be a 404 |
|
276 | # Name that doesn't exist - should be a 404 | |
277 | with assert_http_error(404): |
|
277 | with assert_http_error(404): | |
278 | self.api.read('foo/q.txt') |
|
278 | self.api.read('foo/q.txt') | |
279 |
|
279 | |||
280 | # Specifying format=text should fail on a non-UTF-8 file |
|
280 | # Specifying format=text should fail on a non-UTF-8 file | |
281 | with assert_http_error(400): |
|
281 | with assert_http_error(400): | |
282 | self.api.read('foo/bar/baz.blob', type='file', format='text') |
|
282 | self.api.read('foo/bar/baz.blob', type='file', format='text') | |
283 |
|
283 | |||
284 | def test_get_binary_file_contents(self): |
|
284 | def test_get_binary_file_contents(self): | |
285 | for d, name in self.dirs_nbs: |
|
285 | for d, name in self.dirs_nbs: | |
286 | path = url_path_join(d, name + '.blob') |
|
286 | path = url_path_join(d, name + '.blob') | |
287 | model = self.api.read(path).json() |
|
287 | model = self.api.read(path).json() | |
288 | self.assertEqual(model['name'], u'%s.blob' % name) |
|
288 | self.assertEqual(model['name'], u'%s.blob' % name) | |
289 | self.assertEqual(model['path'], path) |
|
289 | self.assertEqual(model['path'], path) | |
290 | self.assertIn('content', model) |
|
290 | self.assertIn('content', model) | |
291 | self.assertEqual(model['format'], 'base64') |
|
291 | self.assertEqual(model['format'], 'base64') | |
292 | self.assertEqual(model['type'], 'file') |
|
292 | self.assertEqual(model['type'], 'file') | |
293 | self.assertEqual( |
|
293 | self.assertEqual( | |
294 | base64.decodestring(model['content'].encode('ascii')), |
|
294 | base64.decodestring(model['content'].encode('ascii')), | |
295 | self._blob_for_name(name), |
|
295 | self._blob_for_name(name), | |
296 | ) |
|
296 | ) | |
297 |
|
297 | |||
298 | # Name that doesn't exist - should be a 404 |
|
298 | # Name that doesn't exist - should be a 404 | |
299 | with assert_http_error(404): |
|
299 | with assert_http_error(404): | |
300 | self.api.read('foo/q.txt') |
|
300 | self.api.read('foo/q.txt') | |
301 |
|
301 | |||
302 | def test_get_bad_type(self): |
|
302 | def test_get_bad_type(self): | |
303 | with assert_http_error(400): |
|
303 | with assert_http_error(400): | |
304 | self.api.read(u'unicodé', type='file') # this is a directory |
|
304 | self.api.read(u'unicodé', type='file') # this is a directory | |
305 |
|
305 | |||
306 | with assert_http_error(400): |
|
306 | with assert_http_error(400): | |
307 | self.api.read(u'unicodé/innonascii.ipynb', type='directory') |
|
307 | self.api.read(u'unicodé/innonascii.ipynb', type='directory') | |
308 |
|
308 | |||
309 | def _check_created(self, resp, path, type='notebook'): |
|
309 | def _check_created(self, resp, path, type='notebook'): | |
310 | self.assertEqual(resp.status_code, 201) |
|
310 | self.assertEqual(resp.status_code, 201) | |
311 | location_header = py3compat.str_to_unicode(resp.headers['Location']) |
|
311 | location_header = py3compat.str_to_unicode(resp.headers['Location']) | |
312 | self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path))) |
|
312 | self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path))) | |
313 | rjson = resp.json() |
|
313 | rjson = resp.json() | |
314 | self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1]) |
|
314 | self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1]) | |
315 | self.assertEqual(rjson['path'], path) |
|
315 | self.assertEqual(rjson['path'], path) | |
316 | self.assertEqual(rjson['type'], type) |
|
316 | self.assertEqual(rjson['type'], type) | |
317 | isright = self.isdir if type == 'directory' else self.isfile |
|
317 | isright = self.isdir if type == 'directory' else self.isfile | |
318 | assert isright(path) |
|
318 | assert isright(path) | |
319 |
|
319 | |||
def test_create_untitled(self):
    """Untitled notebooks get unique, incrementing names."""
    # First one, in a unicode directory.
    created = self.api.create_untitled(path=u'å b')
    self._check_created(created, u'å b/Untitled.ipynb')

    # A second request must not clobber the first.
    created = self.api.create_untitled(path=u'å b')
    self._check_created(created, u'å b/Untitled1.ipynb')

    # Creation also works two directories down.
    created = self.api.create_untitled(path='foo/bar')
    self._check_created(created, 'foo/bar/Untitled.ipynb')
def test_create_untitled_txt(self):
    """Creating with an explicit extension makes an empty text file."""
    created = self.api.create_untitled(path='foo/bar', ext='.txt')
    self._check_created(created, 'foo/bar/untitled.txt', type='file')

    # Reading it back gives an empty, text-format file model.
    model = self.api.read(path='foo/bar/untitled.txt').json()
    self.assertEqual(model['type'], 'file')
    self.assertEqual(model['format'], 'text')
    self.assertEqual(model['content'], '')
def test_upload(self):
    """Uploading a v4 notebook model creates it at the requested path."""
    target = u'å b/Upload tést.ipynb'
    model = {'type': 'notebook', 'content': new_notebook()}
    resp = self.api.upload(target, body=json.dumps(model))
    self._check_created(resp, target)
def test_mkdir_untitled(self):
    """Untitled directories get unique, incrementing names."""
    expectations = [
        (u'å b', u'å b/Untitled Folder'),        # first one
        (u'å b', u'å b/Untitled Folder 1'),      # second gets a counter
        ('foo/bar', 'foo/bar/Untitled Folder'),  # works two levels down
    ]
    for parent, expected_path in expectations:
        resp = self.api.mkdir_untitled(path=parent)
        self._check_created(resp, expected_path, type='directory')
def test_mkdir(self):
    """An explicit mkdir creates the named (unicode) directory."""
    dirpath = u'å b/New ∂ir'
    self._check_created(self.api.mkdir(dirpath), dirpath, type='directory')
def test_mkdir_hidden_400(self):
    """Creating a hidden directory is rejected with HTTP 400."""
    with assert_http_error(400):
        self.api.mkdir(u'å b/.hidden')
def test_upload_txt(self):
    """A unicode text file survives an upload/read round-trip."""
    text = u'ünicode téxt'
    target = u'å b/Upload tést.txt'
    upload_model = {
        'content' : text,
        'format' : 'text',
        'type' : 'file',
    }
    self.api.upload(target, body=json.dumps(upload_model))

    # Read it back and verify nothing was mangled.
    model = self.api.read(target).json()
    self.assertEqual(model['type'], 'file')
    self.assertEqual(model['format'], 'text')
    self.assertEqual(model['content'], text)
386 | |||
387 | def test_upload_b64(self): |
|
387 | def test_upload_b64(self): | |
388 | body = b'\xFFblob' |
|
388 | body = b'\xFFblob' | |
389 | b64body = base64.encodestring(body).decode('ascii') |
|
389 | b64body = base64.encodestring(body).decode('ascii') | |
390 | model = { |
|
390 | model = { | |
391 | 'content' : b64body, |
|
391 | 'content' : b64body, | |
392 | 'format' : 'base64', |
|
392 | 'format' : 'base64', | |
393 | 'type' : 'file', |
|
393 | 'type' : 'file', | |
394 | } |
|
394 | } | |
395 | path = u'å b/Upload tést.blob' |
|
395 | path = u'å b/Upload tést.blob' | |
396 | resp = self.api.upload(path, body=json.dumps(model)) |
|
396 | resp = self.api.upload(path, body=json.dumps(model)) | |
397 |
|
397 | |||
398 | # check roundtrip |
|
398 | # check roundtrip | |
399 | resp = self.api.read(path) |
|
399 | resp = self.api.read(path) | |
400 | model = resp.json() |
|
400 | model = resp.json() | |
401 | self.assertEqual(model['type'], 'file') |
|
401 | self.assertEqual(model['type'], 'file') | |
402 | self.assertEqual(model['path'], path) |
|
402 | self.assertEqual(model['path'], path) | |
403 | self.assertEqual(model['format'], 'base64') |
|
403 | self.assertEqual(model['format'], 'base64') | |
404 | decoded = base64.decodestring(model['content'].encode('ascii')) |
|
404 | decoded = base64.decodestring(model['content'].encode('ascii')) | |
405 | self.assertEqual(decoded, body) |
|
405 | self.assertEqual(decoded, body) | |
406 |
|
406 | |||
def test_upload_v2(self):
    """Uploading a v2 notebook converts it to the current nbformat."""
    worksheet = v2.new_worksheet()
    worksheet.cells.append(v2.new_code_cell(input='print("hi")'))
    nb = v2.new_notebook()
    nb.worksheets.append(worksheet)

    target = u'å b/Upload tést.ipynb'
    resp = self.api.upload(
        target, body=json.dumps({'content': nb, 'type': 'notebook'}))
    self._check_created(resp, target)

    # Reading it back shows the notebook upgraded to nbformat 4.
    data = self.api.read(target).json()
    self.assertEqual(data['content']['nbformat'], 4)
def test_copy(self):
    """Copying the same notebook twice yields -Copy1 then -Copy2."""
    source = u'å b/ç d.ipynb'
    for n in (1, 2):
        resp = self.api.copy(source, u'å b')
        self._check_created(resp, u'å b/ç d-Copy%d.ipynb' % n)
def test_copy_copy(self):
    """Copying a copy increments the suffix rather than nesting it."""
    first = self.api.copy(u'å b/ç d.ipynb', u'å b')
    self._check_created(first, u'å b/ç d-Copy1.ipynb')

    # Copying the copy should give -Copy2, not -Copy1-Copy1.
    second = self.api.copy(u'å b/ç d-Copy1.ipynb', u'å b')
    self._check_created(second, u'å b/ç d-Copy2.ipynb')
def test_copy_path(self):
    """Copying into another directory keeps the name, then suffixes."""
    copied = self.api.copy(u'foo/a.ipynb', u'å b')
    self._check_created(copied, u'å b/a.ipynb')

    # The name is taken now, so a second copy gets a -Copy1 suffix.
    copied = self.api.copy(u'foo/a.ipynb', u'å b')
    self._check_created(copied, u'å b/a-Copy1.ipynb')
def test_copy_put_400(self):
    """Copying via PUT to an explicit destination is rejected with 400."""
    with assert_http_error(400):
        self.api.copy_put(u'å b/ç d.ipynb', u'å b/cøpy.ipynb')
def test_copy_dir_400(self):
    """Directories cannot be copied; the server answers 400."""
    with assert_http_error(400):
        self.api.copy(u'å b', u'foo')
def test_delete(self):
    """Deleting every notebook leaves all directories notebook-free."""
    for parent, name in self.dirs_nbs:
        print('%r, %r' % (parent, name))
        resp = self.api.delete(url_path_join(parent, name + '.ipynb'))
        self.assertEqual(resp.status_code, 204)

    # Every directory (and the root) should now list zero notebooks.
    for parent in self.dirs + ['/']:
        remaining = notebooks_only(self.api.list(parent).json())
        print('------')
        print(parent)
        print(remaining)
        self.assertEqual(remaining, [])
def test_delete_dirs(self):
    """Recursively emptying the tree leaves an empty root listing."""
    # Walk longest paths first so every directory is already empty by
    # the time we try to delete it (depth-first delete).
    for dirname in sorted(self.dirs + ['/'], key=len, reverse=True):
        for entry in self.api.list(dirname).json()['content']:
            self.api.delete(entry['path'])
    self.assertEqual(self.api.list('/').json()['content'], [])
def test_delete_non_empty_dir(self):
    """Deleting a non-empty directory must fail with HTTP 400."""
    with assert_http_error(400):
        self.api.delete(u'å b')
def test_rename(self):
    """Renaming a notebook updates its model and the directory listing."""
    resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb')
    renamed = resp.json()  # parse once instead of per-assertion
    self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb')
    self.assertEqual(renamed['name'], 'z.ipynb')
    self.assertEqual(renamed['path'], 'foo/z.ipynb')
    assert self.isfile('foo/z.ipynb')

    # Only the new name shows up in the listing afterwards.
    listed_names = set(
        nb['name'] for nb in notebooks_only(self.api.list('foo').json())
    )
    self.assertIn('z.ipynb', listed_names)
    self.assertNotIn('a.ipynb', listed_names)
def test_rename_existing(self):
    """Renaming onto an existing file is refused with 409 Conflict."""
    with assert_http_error(409):
        self.api.rename('foo/a.ipynb', 'foo/b.ipynb')
def test_save(self):
    """Saving an edited notebook persists the new cell."""
    nbcontent = json.loads(self.api.read('foo/a.ipynb').text)['content']
    nb = from_dict(nbcontent)
    nb.cells.append(new_markdown_cell(u'Created by test ³'))

    # Push the modified notebook back to the server.
    self.api.save('foo/a.ipynb',
                  body=json.dumps({'content': nb, 'type': 'notebook'}))

    # Re-read and confirm the cell survived the round-trip.
    saved = from_dict(self.api.read('foo/a.ipynb').json()['content'])
    self.assertEqual(saved.cells[0].source, u'Created by test ³')
def test_checkpoints(self):
    """Create, list, restore, and delete a notebook checkpoint."""
    # Snapshot the original content, then checkpoint it.
    original = self.api.read('foo/a.ipynb')
    r = self.api.new_checkpoint('foo/a.ipynb')
    self.assertEqual(r.status_code, 201)
    cp1 = r.json()
    self.assertEqual(set(cp1), {'id', 'last_modified'})
    self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

    # Modify the notebook and save over the checkpointed version.
    nb = from_dict(json.loads(original.text)['content'])
    nb.cells.append(new_markdown_cell('Created by test'))
    self.api.save('foo/a.ipynb',
                  body=json.dumps({'content': nb, 'type': 'notebook'}))

    # Exactly the one checkpoint is listed.
    self.assertEqual(self.api.get_checkpoints('foo/a.ipynb').json(), [cp1])

    # The live file carries the modification...
    nb = from_dict(self.api.read('foo/a.ipynb').json()['content'])
    self.assertEqual(nb.cells[0].source, 'Created by test')

    # ...until the checkpoint is restored, which discards it.
    r = self.api.restore_checkpoint('foo/a.ipynb', cp1['id'])
    self.assertEqual(r.status_code, 204)
    nb = from_dict(self.api.read('foo/a.ipynb').json()['content'])
    self.assertEqual(nb.cells, [])

    # Deleting the checkpoint empties the checkpoint list.
    r = self.api.delete_checkpoint('foo/a.ipynb', cp1['id'])
    self.assertEqual(r.status_code, 204)
    self.assertEqual(self.api.get_checkpoints('foo/a.ipynb').json(), [])
def test_file_checkpoints(self):
    """
    Test checkpointing of non-notebook files.
    """
    filename = 'foo/a.txt'
    orig_content = json.loads(self.api.read(filename).text)['content']

    # Create a checkpoint of the current contents.
    r = self.api.new_checkpoint(filename)
    self.assertEqual(r.status_code, 201)
    cp1 = r.json()
    self.assertEqual(set(cp1), {'id', 'last_modified'})
    self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

    # Append a line and save over the checkpointed version.
    new_content = orig_content + '\nsecond line'
    self.api.save(filename, body=json.dumps({
        'content': new_content,
        'type': 'file',
        'format': 'text',
    }))

    # Exactly the one checkpoint is listed.
    self.assertEqual(self.api.get_checkpoints(filename).json(), [cp1])

    # The live file has the new content...
    self.assertEqual(self.api.read(filename).json()['content'], new_content)

    # ...until the checkpoint is restored.
    r = self.api.restore_checkpoint(filename, cp1['id'])
    self.assertEqual(r.status_code, 204)
    self.assertEqual(self.api.read(filename).json()['content'], orig_content)

    # Deleting the checkpoint empties the checkpoint list.
    r = self.api.delete_checkpoint(filename, cp1['id'])
    self.assertEqual(r.status_code, 204)
    self.assertEqual(self.api.get_checkpoints(filename).json(), [])
@contextmanager
def patch_cp_root(self, dirname):
    """Temporarily point the checkpoint manager's root dir at *dirname*."""
    cpm = self.notebook.contents_manager.checkpoint_manager
    saved_root = cpm.root_dir
    cpm.root_dir = dirname
    try:
        yield
    finally:
        # Always restore, even if the managed block raised.
        cpm.root_dir = saved_root
def test_checkpoints_separate_root(self):
    """
    Test that FileCheckpointManager functions correctly even when it's
    using a different root dir from FileContentsManager. This also keeps
    the implementation honest for use with ContentsManagers that don't map
    models to the filesystem

    Override this method to a no-op when testing other managers.
    """
    # Re-run both checkpoint suites with the checkpoint root redirected
    # to a scratch directory.
    for checkpoint_suite in (self.test_checkpoints,
                             self.test_file_checkpoints):
        with TemporaryDirectory() as td:
            with self.patch_cp_root(td):
                checkpoint_suite()
def test_walk(self):
    """
    Test ContentsManager.walk.

    Checks that walk() yields exactly the expected (dir, subdirs, files)
    triples, in order. Fix over the previous version: also assert the
    result count, so extra unexpected entries fail the test instead of
    passing silently.
    """
    results = list(self.notebook.contents_manager.walk())
    expected = [
        (
            '',
            [
                'Directory with spaces in',
                'foo',
                'ordering',
                u'unicodé',
                u'å b',
            ],
            ['inroot.blob', 'inroot.ipynb', 'inroot.txt'],
        ),
        (
            'Directory with spaces in',
            [],
            ['inspace.blob', 'inspace.ipynb', 'inspace.txt'],
        ),
        (
            'foo',
            ['bar'],
            [
                'a.blob', 'a.ipynb', 'a.txt',
                'b.blob', 'b.ipynb', 'b.txt',
                'name with spaces.blob',
                'name with spaces.ipynb',
                'name with spaces.txt',
                u'unicodé.blob', u'unicodé.ipynb', u'unicodé.txt'
            ]
        ),
        (
            'foo/bar',
            [],
            ['baz.blob', 'baz.ipynb', 'baz.txt'],
        ),
        (
            'ordering',
            [],
            [
                'A.blob', 'A.ipynb', 'A.txt',
                'C.blob', 'C.ipynb', 'C.txt',
                'b.blob', 'b.ipynb', 'b.txt',
            ],
        ),
        (
            u'unicodé',
            [],
            ['innonascii.blob', 'innonascii.ipynb', 'innonascii.txt'],
        ),
        (
            u'å b',
            [],
            [u'ç d.blob', u'ç d.ipynb', u'ç d.txt'],
        ),
    ]

    # Walk must not yield entries beyond the expected ones.
    self.assertEqual(len(results), len(expected))

    for idx, (dname, subdirs, files) in enumerate(expected):
        result_dname, result_subdirs, result_files = results[idx]
        # Children of the root are joined without a separator.
        sep = '' if dname == '' else '/'
        self.assertEqual(dname, result_dname)
        self.assertEqual(
            [sep.join([dname, sub]) for sub in subdirs],
            result_subdirs,
        )
        self.assertEqual(
            [sep.join([dname, fname]) for fname in files],
            result_files,
        )
General Comments 0
You need to be logged in to leave comments.
Login now