DEV: Separate FileCheckpointManager and GenericFileCheckpointManager....
Scott Sanderson
@@ -0,0 +1,112 @@
1 """
2 Classes for managing Checkpoints.
3 """
4
5 # Copyright (c) IPython Development Team.
6 # Distributed under the terms of the Modified BSD License.
7
8 from IPython.config.configurable import LoggingConfigurable
9
10
11 class CheckpointManager(LoggingConfigurable):
12 """
13 Base class for managing checkpoints for a ContentsManager.
14
15 Subclasses are required to implement:
16
17 create_checkpoint(self, contents_mgr, path)
18 restore_checkpoint(self, contents_mgr, checkpoint_id, path)
19 rename_checkpoint(self, checkpoint_id, old_path, new_path)
20 delete_checkpoint(self, checkpoint_id, path)
21 list_checkpoints(self, path)
22 """
23 def create_checkpoint(self, contents_mgr, path):
24 """Create a checkpoint."""
25 raise NotImplementedError("must be implemented in a subclass")
26
27 def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
28 """Restore a checkpoint"""
29 raise NotImplementedError("must be implemented in a subclass")
30
31 def rename_checkpoint(self, checkpoint_id, old_path, new_path):
32 """Rename a single checkpoint from old_path to new_path."""
33 raise NotImplementedError("must be implemented in a subclass")
34
35 def delete_checkpoint(self, checkpoint_id, path):
36 """delete a checkpoint for a file"""
37 raise NotImplementedError("must be implemented in a subclass")
38
39 def list_checkpoints(self, path):
40 """Return a list of checkpoints for a given file"""
41 raise NotImplementedError("must be implemented in a subclass")
42
43 def rename_all_checkpoints(self, old_path, new_path):
44 """Rename all checkpoints for old_path to new_path."""
45 for cp in self.list_checkpoints(old_path):
46 self.rename_checkpoint(cp['id'], old_path, new_path)
47
48 def delete_all_checkpoints(self, path):
49 """Delete all checkpoints for the given path."""
50 for checkpoint in self.list_checkpoints(path):
51 self.delete_checkpoint(checkpoint['id'], path)
52
53
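To make the contract above concrete, here is a minimal sketch of a subclass. It is not part of this changeset: the in-memory dict, the class name, and the datetime-based timestamps are illustrative assumptions; only the method names and signatures come from the base class above.

from datetime import datetime

class InMemoryCheckpointManager(CheckpointManager):
    """Illustrative only: keeps at most one checkpoint per path in a dict."""

    def __init__(self, **kwargs):
        super(InMemoryCheckpointManager, self).__init__(**kwargs)
        self._models = {}  # (path, checkpoint_id) -> saved contents model

    def create_checkpoint(self, contents_mgr, path):
        # ask the contents manager for the current model and stash it
        self._models[(path, u'checkpoint')] = contents_mgr.get(path, content=True)
        return {'id': u'checkpoint', 'last_modified': datetime.utcnow()}

    def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
        contents_mgr.save(self._models[(path, checkpoint_id)], path)

    def rename_checkpoint(self, checkpoint_id, old_path, new_path):
        self._models[(new_path, checkpoint_id)] = \
            self._models.pop((old_path, checkpoint_id))

    def delete_checkpoint(self, checkpoint_id, path):
        del self._models[(path, checkpoint_id)]

    def list_checkpoints(self, path):
        # timestamps are recomputed here purely for brevity
        return [{'id': cp_id, 'last_modified': datetime.utcnow()}
                for (p, cp_id) in self._models if p == path]
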
54 class GenericCheckpointMixin(object):
55 """
56 Helper for creating CheckpointManagers that can be used with any
57 ContentsManager.
58
59 Provides an implementation of `create_checkpoint` and `restore_checkpoint`
60 in terms of the following operations:
61
62 create_file_checkpoint(self, content, format, path)
63 create_notebook_checkpoint(self, nb, path)
64 get_checkpoint(self, checkpoint_id, path, type)
65
66 **Any** valid CheckpointManager implementation should also be valid when
67 this mixin is applied.
68 """
69
70 def create_checkpoint(self, contents_mgr, path):
71 model = contents_mgr.get(path, content=True)
72 type = model['type']
73 if type == 'notebook':
74 return self.create_notebook_checkpoint(
75 model['content'],
76 path,
77 )
78 elif type == 'file':
79 return self.create_file_checkpoint(
80 model['content'],
81 model['format'],
82 path,
83 )
84
85 def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
86 """Restore a checkpoint."""
87 type = contents_mgr.get(path, content=False)['type']
88 model = self.get_checkpoint(checkpoint_id, path, type)
89 contents_mgr.save(model, path)
90
91 # Required Methods
92 def create_file_checkpoint(self, content, format, path):
93 """Create a checkpoint of the current state of a file
94
95 Returns a checkpoint model for the new checkpoint.
96 """
97 raise NotImplementedError("must be implemented in a subclass")
98
99 def create_notebook_checkpoint(self, nb, path):
100 """Create a checkpoint of the current state of a file
101
102 Returns a checkpoint model for the new checkpoint.
103 """
104 raise NotImplementedError("must be implemented in a subclass")
105
106 def get_checkpoint(self, checkpoint_id, path, type):
107 """Get the content of a checkpoint.
108
109 Returns an unvalidated model with the same structure as
110 the return value of ContentsManager.get
111 """
112 raise NotImplementedError("must be implemented in a subclass")
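
With the mixin, a backend only has to supply the three storage primitives named above (plus the rename/delete/list methods from CheckpointManager); create_checkpoint and restore_checkpoint then come for free. A hedged sketch of that composition, with an invented dict-backed store:

from datetime import datetime

class DictBackedCheckpointManager(GenericCheckpointMixin, CheckpointManager):
    """Illustrative only: checkpoint content lives in a dict, not on disk."""

    def __init__(self, **kwargs):
        super(DictBackedCheckpointManager, self).__init__(**kwargs)
        self._blobs = {}  # (path, checkpoint_id) -> (type, content, format)

    def create_file_checkpoint(self, content, format, path):
        self._blobs[(path, u'checkpoint')] = ('file', content, format)
        return {'id': u'checkpoint', 'last_modified': datetime.utcnow()}

    def create_notebook_checkpoint(self, nb, path):
        self._blobs[(path, u'checkpoint')] = ('notebook', nb, None)
        return {'id': u'checkpoint', 'last_modified': datetime.utcnow()}

    def get_checkpoint(self, checkpoint_id, path, type):
        _, content, format = self._blobs[(path, checkpoint_id)]
        model = {'type': type, 'content': content}
        if type == 'file':
            model['format'] = format
        return model

    # rename_checkpoint, delete_checkpoint and list_checkpoints still have to
    # be implemented, as in any other CheckpointManager subclass.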
@@ -0,0 +1,198 @@
1 """
2 File-based CheckpointManagers.
3 """
4 import os
5 import shutil
6
7 from tornado.web import HTTPError
8
9 from .checkpoints import (
10 CheckpointManager,
11 GenericCheckpointMixin,
12 )
13 from .fileio import FileManagerMixin
14
15 from IPython.utils import tz
16 from IPython.utils.path import ensure_dir_exists
17 from IPython.utils.py3compat import getcwd
18 from IPython.utils.traitlets import Unicode
19
20
21 class FileCheckpointManager(FileManagerMixin, CheckpointManager):
22 """
23 A CheckpointManager that caches checkpoints for files in adjacent
24 directories.
25
26 Only works with FileContentsManager. Use GenericFileCheckpointManager if
27 you want file-based checkpoints with another ContentsManager.
28 """
29
30 checkpoint_dir = Unicode(
31 '.ipynb_checkpoints',
32 config=True,
33 help="""The directory name in which to keep file checkpoints
34
35 This is a path relative to the file's own directory.
36
37 By default, it is .ipynb_checkpoints
38 """,
39 )
40
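Being a configurable trait, the directory name can presumably be overridden from a notebook config file; the snippet below assumes the standard config-file idiom and an arbitrary directory name:

# e.g. in ipython_notebook_config.py (assumed location)
c = get_config()
c.FileCheckpointManager.checkpoint_dir = u'.checkpoints'
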
41 root_dir = Unicode(config=True)
42
43 def _root_dir_default(self):
44 try:
45 return self.parent.root_dir
46 except AttributeError:
47 return getcwd()
48
49 # ContentsManager-dependent checkpoint API
50 def create_checkpoint(self, contents_mgr, path):
51 """Create a checkpoint."""
52 checkpoint_id = u'checkpoint'
53 src_path = contents_mgr._get_os_path(path)
54 dest_path = self.checkpoint_path(checkpoint_id, path)
55 self._copy(src_path, dest_path)
56 return self.checkpoint_model(checkpoint_id, dest_path)
57
58 def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
59 """Restore a checkpoint."""
60 src_path = self.checkpoint_path(checkpoint_id, path)
61 dest_path = contents_mgr._get_os_path(path)
62 self._copy(src_path, dest_path)
63
64 # ContentsManager-independent checkpoint API
65 def rename_checkpoint(self, checkpoint_id, old_path, new_path):
66 """Rename a checkpoint from old_path to new_path."""
67 old_cp_path = self.checkpoint_path(checkpoint_id, old_path)
68 new_cp_path = self.checkpoint_path(checkpoint_id, new_path)
69 if os.path.isfile(old_cp_path):
70 self.log.debug(
71 "Renaming checkpoint %s -> %s",
72 old_cp_path,
73 new_cp_path,
74 )
75 with self.perm_to_403():
76 shutil.move(old_cp_path, new_cp_path)
77
78 def delete_checkpoint(self, checkpoint_id, path):
79 """delete a file's checkpoint"""
80 path = path.strip('/')
81 cp_path = self.checkpoint_path(checkpoint_id, path)
82 if not os.path.isfile(cp_path):
83 self.no_such_checkpoint(path, checkpoint_id)
84
85 self.log.debug("unlinking %s", cp_path)
86 with self.perm_to_403():
87 os.unlink(cp_path)
88
89 def list_checkpoints(self, path):
90 """list the checkpoints for a given file
91
92 This checkpoint manager currently only supports one checkpoint per file.
93 """
94 path = path.strip('/')
95 checkpoint_id = "checkpoint"
96 os_path = self.checkpoint_path(checkpoint_id, path)
97 if not os.path.isfile(os_path):
98 return []
99 else:
100 return [self.checkpoint_model(checkpoint_id, os_path)]
101
102 # Checkpoint-related utilities
103 def checkpoint_path(self, checkpoint_id, path):
104 """find the path to a checkpoint"""
105 path = path.strip('/')
106 parent, name = ('/' + path).rsplit('/', 1)
107 parent = parent.strip('/')
108 basename, ext = os.path.splitext(name)
109 filename = u"{name}-{checkpoint_id}{ext}".format(
110 name=basename,
111 checkpoint_id=checkpoint_id,
112 ext=ext,
113 )
114 os_path = self._get_os_path(path=parent)
115 cp_dir = os.path.join(os_path, self.checkpoint_dir)
116 with self.perm_to_403():
117 ensure_dir_exists(cp_dir)
118 cp_path = os.path.join(cp_dir, filename)
119 return cp_path
120
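A worked example of the mapping this method performs, with an assumed root_dir of /home/user/notebooks:

# path = u'sub/Analysis.ipynb', checkpoint_id = u'checkpoint'
# parent   -> u'sub',  name -> u'Analysis.ipynb'
# filename -> u'Analysis-checkpoint.ipynb'
# cp_path  -> /home/user/notebooks/sub/.ipynb_checkpoints/Analysis-checkpoint.ipynb
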
121 def checkpoint_model(self, checkpoint_id, os_path):
122 """construct the info dict for a given checkpoint"""
123 stats = os.stat(os_path)
124 last_modified = tz.utcfromtimestamp(stats.st_mtime)
125 info = dict(
126 id=checkpoint_id,
127 last_modified=last_modified,
128 )
129 return info
130
131 # Error Handling
132 def no_such_checkpoint(self, path, checkpoint_id):
133 raise HTTPError(
134 404,
135 u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id)
136 )
137
138
139 class GenericFileCheckpointManager(GenericCheckpointMixin,
140 FileCheckpointManager):
141 """
142 Local filesystem CheckpointManager that works with any conforming
143 ContentsManager.
144 """
145 def create_file_checkpoint(self, content, format, path):
146 """Create a checkpoint from the current content of a notebook."""
147 path = path.strip('/')
148 # only the one checkpoint ID:
149 checkpoint_id = u"checkpoint"
150 os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
151 self.log.debug("creating checkpoint for %s", path)
152 with self.perm_to_403():
153 self._save_file(os_checkpoint_path, content, format=format)
154
155 # return the checkpoint info
156 return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
157
158 def create_notebook_checkpoint(self, nb, path):
159 """Create a checkpoint from the current content of a notebook."""
160 path = path.strip('/')
161 # only the one checkpoint ID:
162 checkpoint_id = u"checkpoint"
163 os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
164 self.log.debug("creating checkpoint for %s", path)
165 with self.perm_to_403():
166 self._save_notebook(os_checkpoint_path, nb)
167
168 # return the checkpoint info
169 return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
170
171 def get_checkpoint(self, checkpoint_id, path, type):
172 """Get the content of a checkpoint.
173
174 Returns a model suitable for passing to ContentsManager.save.
175 """
176 path = path.strip('/')
177 self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
178 os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
179 if not os.path.isfile(os_checkpoint_path):
180 self.no_such_checkpoint(path, checkpoint_id)
181
182 if type == 'notebook':
183 return {
184 'type': type,
185 'content': self._read_notebook(
186 os_checkpoint_path,
187 as_version=4,
188 ),
189 }
190 elif type == 'file':
191 content, format = self._read_file(os_checkpoint_path, format=None)
192 return {
193 'type': type,
194 'content': content,
195 'format': format,
196 }
197 else:
198 raise HTTPError(500, u'Unexpected type %s' % type)
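
How this class gets selected is not shown in this hunk; judging from _checkpoint_manager_class_default on FileContentsManager further down, a non-default contents manager would presumably point its checkpoint_manager_class trait here. The trait name and the import path below are inferred, not confirmed by this diff:

c = get_config()
c.ContentsManager.checkpoint_manager_class = \
    'IPython.html.services.contents.filecheckpoints.GenericFileCheckpointManager'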
@@ -0,0 +1,166 @@
1 """
2 Utilities for file-based Contents/Checkpoints managers.
3 """
4
5 # Copyright (c) IPython Development Team.
6 # Distributed under the terms of the Modified BSD License.
7
8 import base64
9 from contextlib import contextmanager
10 import errno
11 import io
12 import os
13 import shutil
14
15 from tornado.web import HTTPError
16
17 from IPython.html.utils import (
18 to_api_path,
19 to_os_path,
20 )
21 from IPython import nbformat
22 from IPython.utils.io import atomic_writing
23 from IPython.utils.py3compat import str_to_unicode
24
25
26 class FileManagerMixin(object):
27 """
28 Mixin for ContentsAPI classes that interact with the filesystem.
29
30 Provides facilities for reading, writing, and copying both notebooks and
31 generic files.
32
33 Shared by FileContentsManager and FileCheckpointManager.
34
35 Note
36 ----
37 Classes using this mixin must provide the following attributes:
38
39 root_dir : unicode
40 A directory against which API-style paths are to be resolved.
41
42 log : logging.Logger
43 """
44
45 @contextmanager
46 def open(self, os_path, *args, **kwargs):
47 """wrapper around io.open that turns permission errors into 403"""
48 with self.perm_to_403(os_path):
49 with io.open(os_path, *args, **kwargs) as f:
50 yield f
51
52 @contextmanager
53 def atomic_writing(self, os_path, *args, **kwargs):
54 """wrapper around atomic_writing that turns permission errors to 403"""
55 with self.perm_to_403(os_path):
56 with atomic_writing(os_path, *args, **kwargs) as f:
57 yield f
58
59 @contextmanager
60 def perm_to_403(self, os_path=''):
61 """context manager for turning permission errors into 403."""
62 try:
63 yield
64 except OSError as e:
65 if e.errno in {errno.EPERM, errno.EACCES}:
66 # make 403 error message without root prefix
67 # this may not work perfectly on unicode paths on Python 2,
68 # but nobody should be doing that anyway.
69 if not os_path:
70 os_path = str_to_unicode(e.filename or 'unknown file')
71 path = to_api_path(os_path, root=self.root_dir)
72 raise HTTPError(403, u'Permission denied: %s' % path)
73 else:
74 raise
75
76 def _copy(self, src, dest):
77 """copy src to dest
78
79 like shutil.copy2, but log errors in copystat
80 """
81 shutil.copyfile(src, dest)
82 try:
83 shutil.copystat(src, dest)
84 except OSError:
85 self.log.debug("copystat on %s failed", dest, exc_info=True)
86
87 def _get_os_path(self, path):
88 """Given an API path, return its file system path.
89
90 Parameters
91 ----------
92 path : string
93 The relative API path to the named file.
94
95 Returns
96 -------
97 path : string
98 Native, absolute OS path for a file.
99 """
100 return to_os_path(path, self.root_dir)
101
102 def _read_notebook(self, os_path, as_version=4):
103 """Read a notebook from an os path."""
104 with self.open(os_path, 'r', encoding='utf-8') as f:
105 try:
106 return nbformat.read(f, as_version=as_version)
107 except Exception as e:
108 raise HTTPError(
109 400,
110 u"Unreadable Notebook: %s %r" % (os_path, e),
111 )
112
113 def _save_notebook(self, os_path, nb):
114 """Save a notebook to an os_path."""
115 with self.atomic_writing(os_path, encoding='utf-8') as f:
116 nbformat.write(nb, f, version=nbformat.NO_CONVERT)
117
118 def _read_file(self, os_path, format):
119 """Read a non-notebook file.
120
121 os_path: The path to be read.
122 format:
123 If 'text', the contents will be decoded as UTF-8.
124 If 'base64', the raw bytes contents will be encoded as base64.
125 If not specified, try to decode as UTF-8, and fall back to base64
126 """
127 if not os.path.isfile(os_path):
128 raise HTTPError(400, "Cannot read non-file %s" % os_path)
129
130 with self.open(os_path, 'rb') as f:
131 bcontent = f.read()
132
133 if format is None or format == 'text':
134 # Try to interpret as unicode if format is unknown or if unicode
135 # was explicitly requested.
136 try:
137 return bcontent.decode('utf8'), 'text'
138 except UnicodeError:
139 if format == 'text':
140 raise HTTPError(
141 400,
142 "%s is not UTF-8 encoded" % os_path,
143 reason='bad format',
144 )
145 return base64.encodestring(bcontent).decode('ascii'), 'base64'
146
147 def _save_file(self, os_path, content, format):
148 """Save content of a generic file."""
149 if format not in {'text', 'base64'}:
150 raise HTTPError(
151 400,
152 "Must specify format of file contents as 'text' or 'base64'",
153 )
154 try:
155 if format == 'text':
156 bcontent = content.encode('utf8')
157 else:
158 b64_bytes = content.encode('ascii')
159 bcontent = base64.decodestring(b64_bytes)
160 except Exception as e:
161 raise HTTPError(
162 400, u'Encoding error saving %s: %s' % (os_path, e)
163 )
164
165 with self.atomic_writing(os_path, text=False) as f:
166 f.write(bcontent)
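
A small standalone round trip through these helpers; using the mixin outside of a ContentsManager like this is purely illustrative (the class name, root_dir value, and logger are assumptions):

import logging

class _DemoFiles(FileManagerMixin):
    # the two attributes the mixin's docstring requires
    root_dir = u'/tmp'
    log = logging.getLogger('fileio-demo')

files = _DemoFiles()
files._save_file(u'/tmp/hello.txt', u'hi there', format='text')
content, format = files._read_file(u'/tmp/hello.txt', format=None)
# -> (u'hi there', 'text'); undecodable bytes would come back base64-encoded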
@@ -1,819 +1,472 @@
1 """A contents manager that uses the local file system for storage."""
1 """A contents manager that uses the local file system for storage."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import base64
6
7 from contextlib import contextmanager
8 import errno
9 import io
7 import io
10 import os
8 import os
11 import shutil
9 import shutil
12 import mimetypes
10 import mimetypes
13
11
14 from tornado import web
12 from tornado import web
15
13
16 from .manager import (
14 from .filecheckpoints import FileCheckpointManager
17 CheckpointManager,
15 from .fileio import FileManagerMixin
18 ContentsManager,
16 from .manager import ContentsManager
19 )
17
20 from IPython import nbformat
18 from IPython import nbformat
21 from IPython.utils.io import atomic_writing
22 from IPython.utils.importstring import import_item
19 from IPython.utils.importstring import import_item
23 from IPython.utils.path import ensure_dir_exists
24 from IPython.utils.traitlets import Any, Unicode, Bool, TraitError
20 from IPython.utils.traitlets import Any, Unicode, Bool, TraitError
25 from IPython.utils.py3compat import getcwd, string_types, str_to_unicode
21 from IPython.utils.py3compat import getcwd, string_types
26 from IPython.utils import tz
22 from IPython.utils import tz
27 from IPython.html.utils import (
23 from IPython.html.utils import (
28 is_hidden,
24 is_hidden,
29 to_api_path,
25 to_api_path,
30 to_os_path,
31 )
26 )
32
27
33 _script_exporter = None
28 _script_exporter = None
34
29
30
35 def _post_save_script(model, os_path, contents_manager, **kwargs):
31 def _post_save_script(model, os_path, contents_manager, **kwargs):
36 """convert notebooks to Python script after save with nbconvert
32 """convert notebooks to Python script after save with nbconvert
37
33
38 replaces `ipython notebook --script`
34 replaces `ipython notebook --script`
39 """
35 """
40 from IPython.nbconvert.exporters.script import ScriptExporter
36 from IPython.nbconvert.exporters.script import ScriptExporter
41
37
42 if model['type'] != 'notebook':
38 if model['type'] != 'notebook':
43 return
39 return
44
40
45 global _script_exporter
41 global _script_exporter
46 if _script_exporter is None:
42 if _script_exporter is None:
47 _script_exporter = ScriptExporter(parent=contents_manager)
43 _script_exporter = ScriptExporter(parent=contents_manager)
48 log = contents_manager.log
44 log = contents_manager.log
49
45
50 base, ext = os.path.splitext(os_path)
46 base, ext = os.path.splitext(os_path)
51 py_fname = base + '.py'
47 py_fname = base + '.py'
52 script, resources = _script_exporter.from_filename(os_path)
48 script, resources = _script_exporter.from_filename(os_path)
53 script_fname = base + resources.get('output_extension', '.txt')
49 script_fname = base + resources.get('output_extension', '.txt')
54 log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir))
50 log.info("Saving script /%s", to_api_path(script_fname, contents_manager.root_dir))
55 with io.open(script_fname, 'w', encoding='utf-8') as f:
51 with io.open(script_fname, 'w', encoding='utf-8') as f:
56 f.write(script)
52 f.write(script)
57
53
58
54
59 class FileManagerMixin(object):
60 """
61 Mixin for ContentsAPI classes that interact with the filesystem.
62
63 Provides facilities for reading, writing, and copying both notebooks and
64 generic files.
65
66 Shared by FileContentsManager and FileCheckpointManager.
67
68 Note
69 ----
70 Classes using this mixin must provide the following attributes:
71
72 root_dir : unicode
73 A directory against against which API-style paths are to be resolved.
74
75 log : logging.Logger
76 """
77
78 @contextmanager
79 def open(self, os_path, *args, **kwargs):
80 """wrapper around io.open that turns permission errors into 403"""
81 with self.perm_to_403(os_path):
82 with io.open(os_path, *args, **kwargs) as f:
83 yield f
84
85 @contextmanager
86 def atomic_writing(self, os_path, *args, **kwargs):
87 """wrapper around atomic_writing that turns permission errors into 403"""
88 with self.perm_to_403(os_path):
89 with atomic_writing(os_path, *args, **kwargs) as f:
90 yield f
91
92 @contextmanager
93 def perm_to_403(self, os_path=''):
94 """context manager for turning permission errors into 403."""
95 try:
96 yield
97 except OSError as e:
98 if e.errno in {errno.EPERM, errno.EACCES}:
99 # make 403 error message without root prefix
100 # this may not work perfectly on unicode paths on Python 2,
101 # but nobody should be doing that anyway.
102 if not os_path:
103 os_path = str_to_unicode(e.filename or 'unknown file')
104 path = to_api_path(os_path, root=self.root_dir)
105 raise web.HTTPError(403, u'Permission denied: %s' % path)
106 else:
107 raise
108
109 def _copy(self, src, dest):
110 """copy src to dest
111
112 like shutil.copy2, but log errors in copystat
113 """
114 shutil.copyfile(src, dest)
115 try:
116 shutil.copystat(src, dest)
117 except OSError:
118 self.log.debug("copystat on %s failed", dest, exc_info=True)
119
120 def _get_os_path(self, path):
121 """Given an API path, return its file system path.
122
123 Parameters
124 ----------
125 path : string
126 The relative API path to the named file.
127
128 Returns
129 -------
130 path : string
131 Native, absolute OS path to for a file.
132 """
133 return to_os_path(path, self.root_dir)
134
135 def _read_notebook(self, os_path, as_version=4):
136 """Read a notebook from an os path."""
137 with self.open(os_path, 'r', encoding='utf-8') as f:
138 try:
139 return nbformat.read(f, as_version=as_version)
140 except Exception as e:
141 raise web.HTTPError(
142 400,
143 u"Unreadable Notebook: %s %r" % (os_path, e),
144 )
145
146 def _save_notebook(self, os_path, nb):
147 """Save a notebook to an os_path."""
148 with self.atomic_writing(os_path, encoding='utf-8') as f:
149 nbformat.write(nb, f, version=nbformat.NO_CONVERT)
150
151 def _read_file(self, os_path, format):
152 """Read a non-notebook file.
153
154 os_path: The path to be read.
155 format:
156 If 'text', the contents will be decoded as UTF-8.
157 If 'base64', the raw bytes contents will be encoded as base64.
158 If not specified, try to decode as UTF-8, and fall back to base64
159 """
160 if not os.path.isfile(os_path):
161 raise web.HTTPError(400, "Cannot read non-file %s" % os_path)
162
163 with self.open(os_path, 'rb') as f:
164 bcontent = f.read()
165
166 if format is None or format == 'text':
167 # Try to interpret as unicode if format is unknown or if unicode
168 # was explicitly requested.
169 try:
170 return bcontent.decode('utf8'), 'text'
171 except UnicodeError as e:
172 if format == 'text':
173 raise web.HTTPError(
174 400,
175 "%s is not UTF-8 encoded" % os_path,
176 reason='bad format',
177 )
178 return base64.encodestring(bcontent).decode('ascii'), 'base64'
179
180 def _save_file(self, os_path, content, format):
181 """Save content of a generic file."""
182 if format not in {'text', 'base64'}:
183 raise web.HTTPError(
184 400,
185 "Must specify format of file contents as 'text' or 'base64'",
186 )
187 try:
188 if format == 'text':
189 bcontent = content.encode('utf8')
190 else:
191 b64_bytes = content.encode('ascii')
192 bcontent = base64.decodestring(b64_bytes)
193 except Exception as e:
194 raise web.HTTPError(400, u'Encoding error saving %s: %s' % (os_path, e))
195
196 with self.atomic_writing(os_path, text=False) as f:
197 f.write(bcontent)
198
199
200 class FileCheckpointManager(FileManagerMixin, CheckpointManager):
201 """
202 A CheckpointManager that caches checkpoints for files in adjacent
203 directories.
204 """
205
206 checkpoint_dir = Unicode(
207 '.ipynb_checkpoints',
208 config=True,
209 help="""The directory name in which to keep file checkpoints
210
211 This is a path relative to the file's own directory.
212
213 By default, it is .ipynb_checkpoints
214 """,
215 )
216
217 root_dir = Unicode(config=True)
218
219 def _root_dir_default(self):
220 try:
221 return self.parent.root_dir
222 except AttributeError:
223 return getcwd()
224
225 # ContentsManager-dependent checkpoint API
226 def create_checkpoint(self, contents_mgr, path):
227 """
228 Create a checkpoint.
229
230 If contents_mgr is backed by the local filesystem, just copy the
231 appropriate file to the checkpoint directory. Otherwise, ask the
232 ContentsManager for a model and write it ourselves.
233 """
234 if contents_mgr.backend == 'local_file':
235 # We know that the file is in the local filesystem, so just copy
236 # from the base location to our location.
237 checkpoint_id = u'checkpoint'
238 src_path = contents_mgr._get_os_path(path)
239 dest_path = self.checkpoint_path(checkpoint_id, path)
240 self._copy(src_path, dest_path)
241 return self.checkpoint_model(checkpoint_id, dest_path)
242 else:
243 return super(FileCheckpointManager, self).create_checkpoint(
244 contents_mgr, path,
245 )
246
247 def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
248 """
249 Restore a checkpoint.
250
251 If contents_mgr is backed by the local filesystem, just copy the
252 appropriate file from the checkpoint directory. Otherwise, load the
253 model and pass it to ContentsManager.save.
254 """
255 if contents_mgr.backend == 'local_file':
256 # We know that the file is in the local filesystem, so just copy
257 # from our base location to the location expected by content
258 src_path = self.checkpoint_path(checkpoint_id, path)
259 dest_path = contents_mgr._get_os_path(path)
260 self._copy(src_path, dest_path)
261 else:
262 super(FileCheckpointManager, self).restore_checkpoint(
263 contents_mgr, checkpoint_id, path
264 )
265
266 # ContentsManager-independent checkpoint API
267 def rename_checkpoint(self, checkpoint_id, old_path, new_path):
268 """Rename a checkpoint from old_path to new_path."""
269 old_cp_path = self.checkpoint_path(checkpoint_id, old_path)
270 new_cp_path = self.checkpoint_path(checkpoint_id, new_path)
271 if os.path.isfile(old_cp_path):
272 self.log.debug(
273 "Renaming checkpoint %s -> %s",
274 old_cp_path,
275 new_cp_path,
276 )
277 with self.perm_to_403():
278 shutil.move(old_cp_path, new_cp_path)
279
280 def delete_checkpoint(self, checkpoint_id, path):
281 """delete a file's checkpoint"""
282 path = path.strip('/')
283 cp_path = self.checkpoint_path(checkpoint_id, path)
284 if not os.path.isfile(cp_path):
285 self.no_such_checkpoint(path, checkpoint_id)
286
287 self.log.debug("unlinking %s", cp_path)
288 with self.perm_to_403():
289 os.unlink(cp_path)
290
291 def list_checkpoints(self, path):
292 """list the checkpoints for a given file
293
294 This contents manager currently only supports one checkpoint per file.
295 """
296 path = path.strip('/')
297 checkpoint_id = "checkpoint"
298 os_path = self.checkpoint_path(checkpoint_id, path)
299 if not os.path.isfile(os_path):
300 return []
301 else:
302 return [self.checkpoint_model(checkpoint_id, os_path)]
303
304 # Checkpoint-related utilities
305 def checkpoint_path(self, checkpoint_id, path):
306 """find the path to a checkpoint"""
307 path = path.strip('/')
308 parent, name = ('/' + path).rsplit('/', 1)
309 parent = parent.strip('/')
310 basename, ext = os.path.splitext(name)
311 filename = u"{name}-{checkpoint_id}{ext}".format(
312 name=basename,
313 checkpoint_id=checkpoint_id,
314 ext=ext,
315 )
316 os_path = self._get_os_path(path=parent)
317 cp_dir = os.path.join(os_path, self.checkpoint_dir)
318 with self.perm_to_403():
319 ensure_dir_exists(cp_dir)
320 cp_path = os.path.join(cp_dir, filename)
321 return cp_path
322
323 def checkpoint_model(self, checkpoint_id, os_path):
324 """construct the info dict for a given checkpoint"""
325 stats = os.stat(os_path)
326 last_modified = tz.utcfromtimestamp(stats.st_mtime)
327 info = dict(
328 id=checkpoint_id,
329 last_modified=last_modified,
330 )
331 return info
332
333 def create_file_checkpoint(self, content, format, path):
334 """Create a checkpoint from the current content of a notebook."""
335 path = path.strip('/')
336 # only the one checkpoint ID:
337 checkpoint_id = u"checkpoint"
338 os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
339 self.log.debug("creating checkpoint for %s", path)
340 with self.perm_to_403():
341 self._save_file(os_checkpoint_path, content, format=format)
342
343 # return the checkpoint info
344 return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
345
346 def create_notebook_checkpoint(self, nb, path):
347 """Create a checkpoint from the current content of a notebook."""
348 path = path.strip('/')
349 # only the one checkpoint ID:
350 checkpoint_id = u"checkpoint"
351 os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
352 self.log.debug("creating checkpoint for %s", path)
353 with self.perm_to_403():
354 self._save_notebook(os_checkpoint_path, nb)
355
356 # return the checkpoint info
357 return self.checkpoint_model(checkpoint_id, os_checkpoint_path)
358
359 def get_checkpoint(self, checkpoint_id, path, type):
360 """Get the content of a checkpoint.
361
362 Returns a model suitable for passing to ContentsManager.save.
363 """
364 path = path.strip('/')
365 self.log.info("restoring %s from checkpoint %s", path, checkpoint_id)
366 os_checkpoint_path = self.checkpoint_path(checkpoint_id, path)
367 if not os.path.isfile(os_checkpoint_path):
368 self.no_such_checkpoint(path, checkpoint_id)
369
370 if type == 'notebook':
371 return {
372 'type': type,
373 'content': self._read_notebook(
374 os_checkpoint_path,
375 as_version=4,
376 ),
377 }
378 elif type == 'file':
379 content, format = self._read_file(os_checkpoint_path, format=None)
380 return {
381 'type': type,
382 'content': content,
383 'format': format,
384 }
385 else:
386 raise web.HTTPError(
387 500,
388 u'Unexpected type %s' % type
389 )
390
391 # Error Handling
392 def no_such_checkpoint(self, path, checkpoint_id):
393 raise web.HTTPError(
394 404,
395 u'Checkpoint does not exist: %s@%s' % (path, checkpoint_id)
396 )
397
398
399 class FileContentsManager(FileManagerMixin, ContentsManager):
55 class FileContentsManager(FileManagerMixin, ContentsManager):
400
56
401 root_dir = Unicode(config=True)
57 root_dir = Unicode(config=True)
402
58
403 def _root_dir_default(self):
59 def _root_dir_default(self):
404 try:
60 try:
405 return self.parent.notebook_dir
61 return self.parent.notebook_dir
406 except AttributeError:
62 except AttributeError:
407 return getcwd()
63 return getcwd()
408
64
409 save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook')
65 save_script = Bool(False, config=True, help='DEPRECATED, use post_save_hook')
410 def _save_script_changed(self):
66 def _save_script_changed(self):
411 self.log.warn("""
67 self.log.warn("""
412 `--script` is deprecated. You can trigger nbconvert via pre- or post-save hooks:
68 `--script` is deprecated. You can trigger nbconvert via pre- or post-save hooks:
413
69
414 ContentsManager.pre_save_hook
70 ContentsManager.pre_save_hook
415 FileContentsManager.post_save_hook
71 FileContentsManager.post_save_hook
416
72
417 A post-save hook has been registered that calls:
73 A post-save hook has been registered that calls:
418
74
419 ipython nbconvert --to script [notebook]
75 ipython nbconvert --to script [notebook]
420
76
421 which behaves similarly to `--script`.
77 which behaves similarly to `--script`.
422 """)
78 """)
423
79
424 self.post_save_hook = _post_save_script
80 self.post_save_hook = _post_save_script
425
81
426 post_save_hook = Any(None, config=True,
82 post_save_hook = Any(None, config=True,
427 help="""Python callable or importstring thereof
83 help="""Python callable or importstring thereof
428
84
429 to be called on the path of a file just saved.
85 to be called on the path of a file just saved.
430
86
431 This can be used to process the file on disk,
87 This can be used to process the file on disk,
432 such as converting the notebook to a script or HTML via nbconvert.
88 such as converting the notebook to a script or HTML via nbconvert.
433
89
434 It will be called as (all arguments passed by keyword):
90 It will be called as (all arguments passed by keyword):
435
91
436 hook(os_path=os_path, model=model, contents_manager=instance)
92 hook(os_path=os_path, model=model, contents_manager=instance)
437
93
438 path: the filesystem path to the file just written
94 path: the filesystem path to the file just written
439 model: the model representing the file
95 model: the model representing the file
440 contents_manager: this ContentsManager instance
96 contents_manager: this ContentsManager instance
441 """
97 """
442 )
98 )
443 def _post_save_hook_changed(self, name, old, new):
99 def _post_save_hook_changed(self, name, old, new):
444 if new and isinstance(new, string_types):
100 if new and isinstance(new, string_types):
445 self.post_save_hook = import_item(self.post_save_hook)
101 self.post_save_hook = import_item(self.post_save_hook)
446 elif new:
102 elif new:
447 if not callable(new):
103 if not callable(new):
448 raise TraitError("post_save_hook must be callable")
104 raise TraitError("post_save_hook must be callable")
449
105
450 def run_post_save_hook(self, model, os_path):
106 def run_post_save_hook(self, model, os_path):
451 """Run the post-save hook if defined, and log errors"""
107 """Run the post-save hook if defined, and log errors"""
452 if self.post_save_hook:
108 if self.post_save_hook:
453 try:
109 try:
454 self.log.debug("Running post-save hook on %s", os_path)
110 self.log.debug("Running post-save hook on %s", os_path)
455 self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
111 self.post_save_hook(os_path=os_path, model=model, contents_manager=self)
456 except Exception:
112 except Exception:
457 self.log.error("Post-save hook failed on %s", os_path, exc_info=True)
113 self.log.error("Post-save hook failed on %s", os_path, exc_info=True)
458
114
459 def _root_dir_changed(self, name, old, new):
115 def _root_dir_changed(self, name, old, new):
460 """Do a bit of validation of the root_dir."""
116 """Do a bit of validation of the root_dir."""
461 if not os.path.isabs(new):
117 if not os.path.isabs(new):
462 # If we receive a non-absolute path, make it absolute.
118 # If we receive a non-absolute path, make it absolute.
463 self.root_dir = os.path.abspath(new)
119 self.root_dir = os.path.abspath(new)
464 return
120 return
465 if not os.path.isdir(new):
121 if not os.path.isdir(new):
466 raise TraitError("%r is not a directory" % new)
122 raise TraitError("%r is not a directory" % new)
467
123
468 def _checkpoint_manager_class_default(self):
124 def _checkpoint_manager_class_default(self):
469 return FileCheckpointManager
125 return FileCheckpointManager
470
126
471 def _backend_default(self):
472 return 'local_file'
473
474 def is_hidden(self, path):
127 def is_hidden(self, path):
475 """Does the API style path correspond to a hidden directory or file?
128 """Does the API style path correspond to a hidden directory or file?
476
129
477 Parameters
130 Parameters
478 ----------
131 ----------
479 path : string
132 path : string
480 The path to check. This is an API path (`/` separated,
133 The path to check. This is an API path (`/` separated,
481 relative to root_dir).
134 relative to root_dir).
482
135
483 Returns
136 Returns
484 -------
137 -------
485 hidden : bool
138 hidden : bool
486 Whether the path exists and is hidden.
139 Whether the path exists and is hidden.
487 """
140 """
488 path = path.strip('/')
141 path = path.strip('/')
489 os_path = self._get_os_path(path=path)
142 os_path = self._get_os_path(path=path)
490 return is_hidden(os_path, self.root_dir)
143 return is_hidden(os_path, self.root_dir)
491
144
492 def file_exists(self, path):
145 def file_exists(self, path):
493 """Returns True if the file exists, else returns False.
146 """Returns True if the file exists, else returns False.
494
147
495 API-style wrapper for os.path.isfile
148 API-style wrapper for os.path.isfile
496
149
497 Parameters
150 Parameters
498 ----------
151 ----------
499 path : string
152 path : string
500 The relative path to the file (with '/' as separator)
153 The relative path to the file (with '/' as separator)
501
154
502 Returns
155 Returns
503 -------
156 -------
504 exists : bool
157 exists : bool
505 Whether the file exists.
158 Whether the file exists.
506 """
159 """
507 path = path.strip('/')
160 path = path.strip('/')
508 os_path = self._get_os_path(path)
161 os_path = self._get_os_path(path)
509 return os.path.isfile(os_path)
162 return os.path.isfile(os_path)
510
163
511 def dir_exists(self, path):
164 def dir_exists(self, path):
512 """Does the API-style path refer to an extant directory?
165 """Does the API-style path refer to an extant directory?
513
166
514 API-style wrapper for os.path.isdir
167 API-style wrapper for os.path.isdir
515
168
516 Parameters
169 Parameters
517 ----------
170 ----------
518 path : string
171 path : string
519 The path to check. This is an API path (`/` separated,
172 The path to check. This is an API path (`/` separated,
520 relative to root_dir).
173 relative to root_dir).
521
174
522 Returns
175 Returns
523 -------
176 -------
524 exists : bool
177 exists : bool
525 Whether the path is indeed a directory.
178 Whether the path is indeed a directory.
526 """
179 """
527 path = path.strip('/')
180 path = path.strip('/')
528 os_path = self._get_os_path(path=path)
181 os_path = self._get_os_path(path=path)
529 return os.path.isdir(os_path)
182 return os.path.isdir(os_path)
530
183
531 def exists(self, path):
184 def exists(self, path):
532 """Returns True if the path exists, else returns False.
185 """Returns True if the path exists, else returns False.
533
186
534 API-style wrapper for os.path.exists
187 API-style wrapper for os.path.exists
535
188
536 Parameters
189 Parameters
537 ----------
190 ----------
538 path : string
191 path : string
539 The API path to the file (with '/' as separator)
192 The API path to the file (with '/' as separator)
540
193
541 Returns
194 Returns
542 -------
195 -------
543 exists : bool
196 exists : bool
544 Whether the target exists.
197 Whether the target exists.
545 """
198 """
546 path = path.strip('/')
199 path = path.strip('/')
547 os_path = self._get_os_path(path=path)
200 os_path = self._get_os_path(path=path)
548 return os.path.exists(os_path)
201 return os.path.exists(os_path)
549
202
550 def _base_model(self, path):
203 def _base_model(self, path):
551 """Build the common base of a contents model"""
204 """Build the common base of a contents model"""
552 os_path = self._get_os_path(path)
205 os_path = self._get_os_path(path)
553 info = os.stat(os_path)
206 info = os.stat(os_path)
554 last_modified = tz.utcfromtimestamp(info.st_mtime)
207 last_modified = tz.utcfromtimestamp(info.st_mtime)
555 created = tz.utcfromtimestamp(info.st_ctime)
208 created = tz.utcfromtimestamp(info.st_ctime)
556 # Create the base model.
209 # Create the base model.
557 model = {}
210 model = {}
558 model['name'] = path.rsplit('/', 1)[-1]
211 model['name'] = path.rsplit('/', 1)[-1]
559 model['path'] = path
212 model['path'] = path
560 model['last_modified'] = last_modified
213 model['last_modified'] = last_modified
561 model['created'] = created
214 model['created'] = created
562 model['content'] = None
215 model['content'] = None
563 model['format'] = None
216 model['format'] = None
564 model['mimetype'] = None
217 model['mimetype'] = None
565 try:
218 try:
566 model['writable'] = os.access(os_path, os.W_OK)
219 model['writable'] = os.access(os_path, os.W_OK)
567 except OSError:
220 except OSError:
568 self.log.error("Failed to check write permissions on %s", os_path)
221 self.log.error("Failed to check write permissions on %s", os_path)
569 model['writable'] = False
222 model['writable'] = False
570 return model
223 return model
571
224
572 def _dir_model(self, path, content=True):
225 def _dir_model(self, path, content=True):
573 """Build a model for a directory
226 """Build a model for a directory
574
227
575 if content is requested, will include a listing of the directory
228 if content is requested, will include a listing of the directory
576 """
229 """
577 os_path = self._get_os_path(path)
230 os_path = self._get_os_path(path)
578
231
579 four_o_four = u'directory does not exist: %r' % path
232 four_o_four = u'directory does not exist: %r' % path
580
233
581 if not os.path.isdir(os_path):
234 if not os.path.isdir(os_path):
582 raise web.HTTPError(404, four_o_four)
235 raise web.HTTPError(404, four_o_four)
583 elif is_hidden(os_path, self.root_dir):
236 elif is_hidden(os_path, self.root_dir):
584 self.log.info("Refusing to serve hidden directory %r, via 404 Error",
237 self.log.info("Refusing to serve hidden directory %r, via 404 Error",
585 os_path
238 os_path
586 )
239 )
587 raise web.HTTPError(404, four_o_four)
240 raise web.HTTPError(404, four_o_four)
588
241
589 model = self._base_model(path)
242 model = self._base_model(path)
590 model['type'] = 'directory'
243 model['type'] = 'directory'
591 if content:
244 if content:
592 model['content'] = contents = []
245 model['content'] = contents = []
593 os_dir = self._get_os_path(path)
246 os_dir = self._get_os_path(path)
594 for name in os.listdir(os_dir):
247 for name in os.listdir(os_dir):
595 os_path = os.path.join(os_dir, name)
248 os_path = os.path.join(os_dir, name)
596 # skip over broken symlinks in listing
249 # skip over broken symlinks in listing
597 if not os.path.exists(os_path):
250 if not os.path.exists(os_path):
598 self.log.warn("%s doesn't exist", os_path)
251 self.log.warn("%s doesn't exist", os_path)
599 continue
252 continue
600 elif not os.path.isfile(os_path) and not os.path.isdir(os_path):
253 elif not os.path.isfile(os_path) and not os.path.isdir(os_path):
601 self.log.debug("%s not a regular file", os_path)
254 self.log.debug("%s not a regular file", os_path)
602 continue
255 continue
603 if self.should_list(name) and not is_hidden(os_path, self.root_dir):
256 if self.should_list(name) and not is_hidden(os_path, self.root_dir):
604 contents.append(self.get(
257 contents.append(self.get(
605 path='%s/%s' % (path, name),
258 path='%s/%s' % (path, name),
606 content=False)
259 content=False)
607 )
260 )
608
261
609 model['format'] = 'json'
262 model['format'] = 'json'
610
263
611 return model
264 return model
612
265
613 def _file_model(self, path, content=True, format=None):
266 def _file_model(self, path, content=True, format=None):
614 """Build a model for a file
267 """Build a model for a file
615
268
616 if content is requested, include the file contents.
269 if content is requested, include the file contents.
617
270
618 format:
271 format:
619 If 'text', the contents will be decoded as UTF-8.
272 If 'text', the contents will be decoded as UTF-8.
620 If 'base64', the raw bytes contents will be encoded as base64.
273 If 'base64', the raw bytes contents will be encoded as base64.
621 If not specified, try to decode as UTF-8, and fall back to base64
274 If not specified, try to decode as UTF-8, and fall back to base64
622 """
275 """
623 model = self._base_model(path)
276 model = self._base_model(path)
624 model['type'] = 'file'
277 model['type'] = 'file'
625
278
626 os_path = self._get_os_path(path)
279 os_path = self._get_os_path(path)
627
280
628 if content:
281 if content:
629 content, format = self._read_file(os_path, format)
282 content, format = self._read_file(os_path, format)
630 default_mime = {
283 default_mime = {
631 'text': 'text/plain',
284 'text': 'text/plain',
632 'base64': 'application/octet-stream'
285 'base64': 'application/octet-stream'
633 }[format]
286 }[format]
634
287
635 model.update(
288 model.update(
636 content=content,
289 content=content,
637 format=format,
290 format=format,
638 mimetype=mimetypes.guess_type(os_path)[0] or default_mime,
291 mimetype=mimetypes.guess_type(os_path)[0] or default_mime,
639 )
292 )
640
293
641 return model
294 return model
642
295
643 def _notebook_model(self, path, content=True):
296 def _notebook_model(self, path, content=True):
644 """Build a notebook model
297 """Build a notebook model
645
298
646 if content is requested, the notebook content will be populated
299 if content is requested, the notebook content will be populated
647 as a JSON structure (not double-serialized)
300 as a JSON structure (not double-serialized)
648 """
301 """
649 model = self._base_model(path)
302 model = self._base_model(path)
650 model['type'] = 'notebook'
303 model['type'] = 'notebook'
651 if content:
304 if content:
652 os_path = self._get_os_path(path)
305 os_path = self._get_os_path(path)
653 nb = self._read_notebook(os_path, as_version=4)
306 nb = self._read_notebook(os_path, as_version=4)
654 self.mark_trusted_cells(nb, path)
307 self.mark_trusted_cells(nb, path)
655 model['content'] = nb
308 model['content'] = nb
656 model['format'] = 'json'
309 model['format'] = 'json'
657 self.validate_notebook_model(model)
310 self.validate_notebook_model(model)
658 return model
311 return model
659
312
660 def get(self, path, content=True, type=None, format=None):
313 def get(self, path, content=True, type=None, format=None):
661 """ Takes a path for an entity and returns its model
314 """ Takes a path for an entity and returns its model
662
315
663 Parameters
316 Parameters
664 ----------
317 ----------
665 path : str
318 path : str
666 the API path that describes the relative path for the target
319 the API path that describes the relative path for the target
667 content : bool
320 content : bool
668 Whether to include the contents in the reply
321 Whether to include the contents in the reply
669 type : str, optional
322 type : str, optional
670 The requested type - 'file', 'notebook', or 'directory'.
323 The requested type - 'file', 'notebook', or 'directory'.
671 Will raise HTTPError 400 if the content doesn't match.
324 Will raise HTTPError 400 if the content doesn't match.
672 format : str, optional
325 format : str, optional
673 The requested format for file contents. 'text' or 'base64'.
326 The requested format for file contents. 'text' or 'base64'.
674 Ignored if this returns a notebook or directory model.
327 Ignored if this returns a notebook or directory model.
675
328
676 Returns
329 Returns
677 -------
330 -------
678 model : dict
331 model : dict
679 the contents model. If content=True, returns the contents
332 the contents model. If content=True, returns the contents
680 of the file or directory as well.
333 of the file or directory as well.
681 """
334 """
682 path = path.strip('/')
335 path = path.strip('/')
683
336
684 if not self.exists(path):
337 if not self.exists(path):
685 raise web.HTTPError(404, u'No such file or directory: %s' % path)
338 raise web.HTTPError(404, u'No such file or directory: %s' % path)
686
339
687 os_path = self._get_os_path(path)
340 os_path = self._get_os_path(path)
688 if os.path.isdir(os_path):
341 if os.path.isdir(os_path):
689 if type not in (None, 'directory'):
342 if type not in (None, 'directory'):
690 raise web.HTTPError(400,
343 raise web.HTTPError(400,
691 u'%s is a directory, not a %s' % (path, type), reason='bad type')
344 u'%s is a directory, not a %s' % (path, type), reason='bad type')
692 model = self._dir_model(path, content=content)
345 model = self._dir_model(path, content=content)
693 elif type == 'notebook' or (type is None and path.endswith('.ipynb')):
346 elif type == 'notebook' or (type is None and path.endswith('.ipynb')):
694 model = self._notebook_model(path, content=content)
347 model = self._notebook_model(path, content=content)
695 else:
348 else:
696 if type == 'directory':
349 if type == 'directory':
697 raise web.HTTPError(400,
350 raise web.HTTPError(400,
698 u'%s is not a directory', reason='bad type')
351 u'%s is not a directory', reason='bad type')
699 model = self._file_model(path, content=content, format=format)
352 model = self._file_model(path, content=content, format=format)
700 return model
353 return model
701
354
702 def _save_directory(self, os_path, model, path=''):
355 def _save_directory(self, os_path, model, path=''):
703 """create a directory"""
356 """create a directory"""
704 if is_hidden(os_path, self.root_dir):
357 if is_hidden(os_path, self.root_dir):
705 raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path)
358 raise web.HTTPError(400, u'Cannot create hidden directory %r' % os_path)
706 if not os.path.exists(os_path):
359 if not os.path.exists(os_path):
707 with self.perm_to_403():
360 with self.perm_to_403():
708 os.mkdir(os_path)
361 os.mkdir(os_path)
709 elif not os.path.isdir(os_path):
362 elif not os.path.isdir(os_path):
710 raise web.HTTPError(400, u'Not a directory: %s' % (os_path))
363 raise web.HTTPError(400, u'Not a directory: %s' % (os_path))
711 else:
364 else:
712 self.log.debug("Directory %r already exists", os_path)
365 self.log.debug("Directory %r already exists", os_path)
713
366
714 def save(self, model, path=''):
367 def save(self, model, path=''):
715 """Save the file model and return the model with no content."""
368 """Save the file model and return the model with no content."""
716 path = path.strip('/')
369 path = path.strip('/')
717
370
718 if 'type' not in model:
371 if 'type' not in model:
719 raise web.HTTPError(400, u'No file type provided')
372 raise web.HTTPError(400, u'No file type provided')
720 if 'content' not in model and model['type'] != 'directory':
373 if 'content' not in model and model['type'] != 'directory':
721 raise web.HTTPError(400, u'No file content provided')
374 raise web.HTTPError(400, u'No file content provided')
722
375
723 self.run_pre_save_hook(model=model, path=path)
376 self.run_pre_save_hook(model=model, path=path)
724
377
725 os_path = self._get_os_path(path)
378 os_path = self._get_os_path(path)
726 self.log.debug("Saving %s", os_path)
379 self.log.debug("Saving %s", os_path)
727 try:
380 try:
728 if model['type'] == 'notebook':
381 if model['type'] == 'notebook':
729 nb = nbformat.from_dict(model['content'])
382 nb = nbformat.from_dict(model['content'])
730 self.check_and_sign(nb, path)
383 self.check_and_sign(nb, path)
731 self._save_notebook(os_path, nb)
384 self._save_notebook(os_path, nb)
732 # One checkpoint should always exist for notebooks.
385 # One checkpoint should always exist for notebooks.
733 if not self.checkpoint_manager.list_checkpoints(path):
386 if not self.checkpoint_manager.list_checkpoints(path):
734 self.create_checkpoint(path)
387 self.create_checkpoint(path)
735 elif model['type'] == 'file':
388 elif model['type'] == 'file':
736 # Missing format will be handled internally by _save_file.
389 # Missing format will be handled internally by _save_file.
737 self._save_file(os_path, model['content'], model.get('format'))
390 self._save_file(os_path, model['content'], model.get('format'))
738 elif model['type'] == 'directory':
391 elif model['type'] == 'directory':
739 self._save_directory(os_path, model, path)
392 self._save_directory(os_path, model, path)
740 else:
393 else:
741 raise web.HTTPError(400, "Unhandled contents type: %s" % model['type'])
394 raise web.HTTPError(400, "Unhandled contents type: %s" % model['type'])
742 except web.HTTPError:
395 except web.HTTPError:
743 raise
396 raise
744 except Exception as e:
397 except Exception as e:
745 self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True)
398 self.log.error(u'Error while saving file: %s %s', path, e, exc_info=True)
746 raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e))
399 raise web.HTTPError(500, u'Unexpected error while saving file: %s %s' % (path, e))
747
400
748 validation_message = None
401 validation_message = None
749 if model['type'] == 'notebook':
402 if model['type'] == 'notebook':
750 self.validate_notebook_model(model)
403 self.validate_notebook_model(model)
751 validation_message = model.get('message', None)
404 validation_message = model.get('message', None)
752
405
753 model = self.get(path, content=False)
406 model = self.get(path, content=False)
754 if validation_message:
407 if validation_message:
755 model['message'] = validation_message
408 model['message'] = validation_message
756
409
757 self.run_post_save_hook(model=model, os_path=os_path)
410 self.run_post_save_hook(model=model, os_path=os_path)
758
411
759 return model
412 return model
760
413
761 def delete_file(self, path):
414 def delete_file(self, path):
762 """Delete file at path."""
415 """Delete file at path."""
763 path = path.strip('/')
416 path = path.strip('/')
764 os_path = self._get_os_path(path)
417 os_path = self._get_os_path(path)
765 rm = os.unlink
418 rm = os.unlink
766 if os.path.isdir(os_path):
419 if os.path.isdir(os_path):
767 listing = os.listdir(os_path)
420 listing = os.listdir(os_path)
768 # Don't delete non-empty directories.
421 # Don't delete non-empty directories.
769 # A directory containing only leftover checkpoints is
422 # A directory containing only leftover checkpoints is
770 # considered empty.
423 # considered empty.
771 cp_dir = getattr(self.checkpoint_manager, 'checkpoint_dir', None)
424 cp_dir = getattr(self.checkpoint_manager, 'checkpoint_dir', None)
772 for entry in listing:
425 for entry in listing:
773 if entry != cp_dir:
426 if entry != cp_dir:
774 raise web.HTTPError(400, u'Directory %s not empty' % os_path)
427 raise web.HTTPError(400, u'Directory %s not empty' % os_path)
775 elif not os.path.isfile(os_path):
428 elif not os.path.isfile(os_path):
776 raise web.HTTPError(404, u'File does not exist: %s' % os_path)
429 raise web.HTTPError(404, u'File does not exist: %s' % os_path)
777
430
778 if os.path.isdir(os_path):
431 if os.path.isdir(os_path):
779 self.log.debug("Removing directory %s", os_path)
432 self.log.debug("Removing directory %s", os_path)
780 with self.perm_to_403():
433 with self.perm_to_403():
781 shutil.rmtree(os_path)
434 shutil.rmtree(os_path)
782 else:
435 else:
783 self.log.debug("Unlinking file %s", os_path)
436 self.log.debug("Unlinking file %s", os_path)
784 with self.perm_to_403():
437 with self.perm_to_403():
785 rm(os_path)
438 rm(os_path)
786
439
787 def rename_file(self, old_path, new_path):
440 def rename_file(self, old_path, new_path):
788 """Rename a file."""
441 """Rename a file."""
789 old_path = old_path.strip('/')
442 old_path = old_path.strip('/')
790 new_path = new_path.strip('/')
443 new_path = new_path.strip('/')
791 if new_path == old_path:
444 if new_path == old_path:
792 return
445 return
793
446
794 new_os_path = self._get_os_path(new_path)
447 new_os_path = self._get_os_path(new_path)
795 old_os_path = self._get_os_path(old_path)
448 old_os_path = self._get_os_path(old_path)
796
449
797 # Should we proceed with the move?
450 # Should we proceed with the move?
798 if os.path.exists(new_os_path):
451 if os.path.exists(new_os_path):
799 raise web.HTTPError(409, u'File already exists: %s' % new_path)
452 raise web.HTTPError(409, u'File already exists: %s' % new_path)
800
453
801 # Move the file
454 # Move the file
802 try:
455 try:
803 with self.perm_to_403():
456 with self.perm_to_403():
804 shutil.move(old_os_path, new_os_path)
457 shutil.move(old_os_path, new_os_path)
805 except web.HTTPError:
458 except web.HTTPError:
806 raise
459 raise
807 except Exception as e:
460 except Exception as e:
808 raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e))
461 raise web.HTTPError(500, u'Unknown error renaming file: %s %s' % (old_path, e))
809
462
810 def info_string(self):
463 def info_string(self):
811 return "Serving notebooks from local directory: %s" % self.root_dir
464 return "Serving notebooks from local directory: %s" % self.root_dir
812
465
813 def get_kernel_path(self, path, model=None):
466 def get_kernel_path(self, path, model=None):
814 """Return the initial API path of a kernel associated with a given notebook"""
467 """Return the initial API path of a kernel associated with a given notebook"""
815 if '/' in path:
468 if '/' in path:
816 parent_dir = path.rsplit('/', 1)[0]
469 parent_dir = path.rsplit('/', 1)[0]
817 else:
470 else:
818 parent_dir = ''
471 parent_dir = ''
819 return parent_dir
472 return parent_dir
@@ -1,539 +1,468 @@
1 """A base class for contents managers."""
1 """A base class for contents managers."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from fnmatch import fnmatch
6 from fnmatch import fnmatch
7 import itertools
7 import itertools
8 import json
8 import json
9 import os
9 import os
10 import re
10 import re
11
11
12 from tornado.web import HTTPError
12 from tornado.web import HTTPError
13
13
14 from .checkpoints import CheckpointManager
14 from IPython.config.configurable import LoggingConfigurable
15 from IPython.config.configurable import LoggingConfigurable
15 from IPython.nbformat import sign, validate, ValidationError
16 from IPython.nbformat import sign, validate, ValidationError
16 from IPython.nbformat.v4 import new_notebook
17 from IPython.nbformat.v4 import new_notebook
17 from IPython.utils.importstring import import_item
18 from IPython.utils.importstring import import_item
18 from IPython.utils.traitlets import (
19 from IPython.utils.traitlets import (
19 Any,
20 Any,
20 Dict,
21 Dict,
21 Instance,
22 Instance,
22 List,
23 List,
23 TraitError,
24 TraitError,
24 Type,
25 Type,
25 Unicode,
26 Unicode,
26 )
27 )
27 from IPython.utils.py3compat import string_types
28 from IPython.utils.py3compat import string_types
28
29
29 copy_pat = re.compile(r'\-Copy\d*\.')
30 copy_pat = re.compile(r'\-Copy\d*\.')
30
31
31
32
32 class CheckpointManager(LoggingConfigurable):
33 """
34 Base class for managing checkpoints for a ContentsManager.
35 """
36
37 def create_checkpoint(self, contents_mgr, path):
38 model = contents_mgr.get(path, content=True)
39 type = model['type']
40 if type == 'notebook':
41 return self.create_notebook_checkpoint(
42 model['content'],
43 path,
44 )
45 elif type == 'file':
46 return self.create_file_checkpoint(
47 model['content'],
48 model['format'],
49 path,
50 )
51
52 def restore_checkpoint(self, contents_mgr, checkpoint_id, path):
53 """Restore a checkpoint."""
54 type = contents_mgr.get(path, content=False)['type']
55 model = self.get_checkpoint(checkpoint_id, path, type)
56 contents_mgr.save(model, path)
57
58 def create_file_checkpoint(self, content, format, path):
59 """Create a checkpoint of the current state of a file
60
61 Returns a checkpoint model for the new checkpoint.
62 """
63 raise NotImplementedError("must be implemented in a subclass")
64
65 def create_notebook_checkpoint(self, nb, path):
66 """Create a checkpoint of the current state of a file
67
68 Returns a checkpoint model for the new checkpoint.
69 """
70 raise NotImplementedError("must be implemented in a subclass")
71
72 def get_checkpoint(self, checkpoint_id, path, type):
73 """Get the content of a checkpoint.
74
75 Returns an unvalidated model with the same structure as
76 the return value of ContentsManager.get
77 """
78 raise NotImplementedError("must be implemented in a subclass")
79
80 def rename_checkpoint(self, checkpoint_id, old_path, new_path):
81 """Rename a single checkpoint from old_path to new_path."""
82 raise NotImplementedError("must be implemented in a subclass")
83
84 def delete_checkpoint(self, checkpoint_id, path):
85 """delete a checkpoint for a file"""
86 raise NotImplementedError("must be implemented in a subclass")
87
88 def list_checkpoints(self, path):
89 """Return a list of checkpoints for a given file"""
90 raise NotImplementedError("must be implemented in a subclass")
91
92 def rename_all_checkpoints(self, old_path, new_path):
93 """Rename all checkpoints for old_path to new_path."""
94 for cp in self.list_checkpoints(old_path):
95 self.rename_checkpoint(cp['id'], old_path, new_path)
96
97 def delete_all_checkpoints(self, path):
98 """Delete all checkpoints for the given path."""
99 for checkpoint in self.list_checkpoints(path):
100 self.delete_checkpoint(checkpoint['id'], path)
101
102
103 class ContentsManager(LoggingConfigurable):
33 class ContentsManager(LoggingConfigurable):
104 """Base class for serving files and directories.
34 """Base class for serving files and directories.
105
35
106 This serves any text or binary file,
36 This serves any text or binary file,
107 as well as directories,
37 as well as directories,
108 with special handling for JSON notebook documents.
38 with special handling for JSON notebook documents.
109
39
110 Most APIs take a path argument,
40 Most APIs take a path argument,
111 which is always an API-style unicode path,
41 which is always an API-style unicode path,
112 and always refers to a directory.
42 and always refers to a directory.
113
43
114 - unicode, not url-escaped
44 - unicode, not url-escaped
115 - '/'-separated
45 - '/'-separated
116 - leading and trailing '/' will be stripped
46 - leading and trailing '/' will be stripped
117 - if unspecified, path defaults to '',
47 - if unspecified, path defaults to '',
118 indicating the root path.
48 indicating the root path.
119
49
120 """
50 """
121
51
122 notary = Instance(sign.NotebookNotary)
52 notary = Instance(sign.NotebookNotary)
123 def _notary_default(self):
53 def _notary_default(self):
124 return sign.NotebookNotary(parent=self)
54 return sign.NotebookNotary(parent=self)
125
55
126 hide_globs = List(Unicode, [
56 hide_globs = List(Unicode, [
127 u'__pycache__', '*.pyc', '*.pyo',
57 u'__pycache__', '*.pyc', '*.pyo',
128 '.DS_Store', '*.so', '*.dylib', '*~',
58 '.DS_Store', '*.so', '*.dylib', '*~',
129 ], config=True, help="""
59 ], config=True, help="""
130 Glob patterns to hide in file and directory listings.
60 Glob patterns to hide in file and directory listings.
131 """)
61 """)
132
62
133 untitled_notebook = Unicode("Untitled", config=True,
63 untitled_notebook = Unicode("Untitled", config=True,
134 help="The base name used when creating untitled notebooks."
64 help="The base name used when creating untitled notebooks."
135 )
65 )
136
66
137 untitled_file = Unicode("untitled", config=True,
67 untitled_file = Unicode("untitled", config=True,
138 help="The base name used when creating untitled files."
68 help="The base name used when creating untitled files."
139 )
69 )
140
70
141 untitled_directory = Unicode("Untitled Folder", config=True,
71 untitled_directory = Unicode("Untitled Folder", config=True,
142 help="The base name used when creating untitled directories."
72 help="The base name used when creating untitled directories."
143 )
73 )
144
74
145 pre_save_hook = Any(None, config=True,
75 pre_save_hook = Any(None, config=True,
146 help="""Python callable or importstring thereof
76 help="""Python callable or importstring thereof
147
77
148 To be called on a contents model prior to save.
78 To be called on a contents model prior to save.
149
79
150 This can be used to process the model before it is written,
80 This can be used to process the model before it is written,
151 for example to strip notebook outputs or other data that
81 for example to strip notebook outputs or other data that
152 should not be saved.
82 should not be saved.
153
83
154 It will be called as (all arguments passed by keyword):
84 It will be called as (all arguments passed by keyword):
155
85
156 hook(path=path, model=model, contents_manager=self)
86 hook(path=path, model=model, contents_manager=self)
157
87
158 model: the model to be saved. Includes file contents.
88 model: the model to be saved. Includes file contents.
159 modifying this dict will affect the file that is stored.
89 modifying this dict will affect the file that is stored.
160 path: the API path of the save destination
90 path: the API path of the save destination
161 contents_manager: this ContentsManager instance
91 contents_manager: this ContentsManager instance
162 """
92 """
163 )
93 )
164 def _pre_save_hook_changed(self, name, old, new):
94 def _pre_save_hook_changed(self, name, old, new):
165 if new and isinstance(new, string_types):
95 if new and isinstance(new, string_types):
166 self.pre_save_hook = import_item(self.pre_save_hook)
96 self.pre_save_hook = import_item(self.pre_save_hook)
167 elif new:
97 elif new:
168 if not callable(new):
98 if not callable(new):
169 raise TraitError("pre_save_hook must be callable")
99 raise TraitError("pre_save_hook must be callable")
170
100
171 def run_pre_save_hook(self, model, path, **kwargs):
101 def run_pre_save_hook(self, model, path, **kwargs):
172 """Run the pre-save hook if defined, and log errors"""
102 """Run the pre-save hook if defined, and log errors"""
173 if self.pre_save_hook:
103 if self.pre_save_hook:
174 try:
104 try:
175 self.log.debug("Running pre-save hook on %s", path)
105 self.log.debug("Running pre-save hook on %s", path)
176 self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs)
106 self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs)
177 except Exception:
107 except Exception:
178 self.log.error("Pre-save hook failed on %s", path, exc_info=True)
108 self.log.error("Pre-save hook failed on %s", path, exc_info=True)
179
109
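As a sketch of the hook contract documented above (the hook name and its registration are assumptions, not part of IPython), a pre-save hook that strips code-cell outputs before notebooks are written could look like this:

def scrub_output_pre_save(model, path, contents_manager, **kwargs):
    """Clear code-cell outputs before a notebook model is saved."""
    if model['type'] != 'notebook':
        return
    # model['content'] is the nbformat v4 dict; mutating it changes what gets written
    for cell in model['content'].get('cells', []):
        if cell.get('cell_type') == 'code':
            cell['outputs'] = []
            cell['execution_count'] = None

# registered via config, e.g. c.ContentsManager.pre_save_hook = 'mymodule.scrub_output_pre_save'
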
180 checkpoint_manager_class = Type(CheckpointManager, config=True)
110 checkpoint_manager_class = Type(CheckpointManager, config=True)
181 checkpoint_manager = Instance(CheckpointManager, config=True)
111 checkpoint_manager = Instance(CheckpointManager, config=True)
182 checkpoint_manager_kwargs = Dict(allow_none=False, config=True)
112 checkpoint_manager_kwargs = Dict(allow_none=False, config=True)
183 backend = Unicode(default_value="")
184
113
185 def _checkpoint_manager_default(self):
114 def _checkpoint_manager_default(self):
186 return self.checkpoint_manager_class(**self.checkpoint_manager_kwargs)
115 return self.checkpoint_manager_class(**self.checkpoint_manager_kwargs)
187
116
188 def _checkpoint_manager_kwargs_default(self):
117 def _checkpoint_manager_kwargs_default(self):
189 return dict(
118 return dict(
190 parent=self,
119 parent=self,
191 log=self.log,
120 log=self.log,
192 )
121 )
193
122
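A minimal configuration sketch for these traits (the dotted class name is hypothetical): pointing checkpoint_manager_class at a CheckpointManager subclass makes _checkpoint_manager_default instantiate it with the kwargs above, so the backend receives this manager as its parent and shares its logger.

from IPython.config import Config

c = Config()
# swap in a custom checkpoint backend; the Type trait resolves the dotted string
c.ContentsManager.checkpoint_manager_class = 'mypackage.checkpoints.InMemoryCheckpointManager'
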
194 # ContentsManager API part 1: methods that must be
123 # ContentsManager API part 1: methods that must be
195 # implemented in subclasses.
124 # implemented in subclasses.
196
125
197 def dir_exists(self, path):
126 def dir_exists(self, path):
198 """Does the API-style path (directory) actually exist?
127 """Does the API-style path (directory) actually exist?
199
128
200 Like os.path.isdir
129 Like os.path.isdir
201
130
202 Override this method in subclasses.
131 Override this method in subclasses.
203
132
204 Parameters
133 Parameters
205 ----------
134 ----------
206 path : string
135 path : string
207 The path to check
136 The path to check
208
137
209 Returns
138 Returns
210 -------
139 -------
211 exists : bool
140 exists : bool
212 Whether the path does indeed exist.
141 Whether the path does indeed exist.
213 """
142 """
214 raise NotImplementedError
143 raise NotImplementedError
215
144
216 def is_hidden(self, path):
145 def is_hidden(self, path):
217 """Does the API style path correspond to a hidden directory or file?
146 """Does the API style path correspond to a hidden directory or file?
218
147
219 Parameters
148 Parameters
220 ----------
149 ----------
221 path : string
150 path : string
222 The path to check. This is an API path (`/` separated,
151 The path to check. This is an API path (`/` separated,
223 relative to root dir).
152 relative to root dir).
224
153
225 Returns
154 Returns
226 -------
155 -------
227 hidden : bool
156 hidden : bool
228 Whether the path is hidden.
157 Whether the path is hidden.
229
158
230 """
159 """
231 raise NotImplementedError
160 raise NotImplementedError
232
161
233 def file_exists(self, path=''):
162 def file_exists(self, path=''):
234 """Does a file exist at the given path?
163 """Does a file exist at the given path?
235
164
236 Like os.path.isfile
165 Like os.path.isfile
237
166
238 Override this method in subclasses.
167 Override this method in subclasses.
239
168
240 Parameters
169 Parameters
241 ----------
170 ----------
242 path : string
171 path : string
243 The API path of the file to check for ('/'-separated, relative to root dir)
172 The API path of the file to check for ('/'-separated, relative to root dir)
246
175
247 Returns
176 Returns
248 -------
177 -------
249 exists : bool
178 exists : bool
250 Whether the file exists.
179 Whether the file exists.
251 """
180 """
252 raise NotImplementedError('must be implemented in a subclass')
181 raise NotImplementedError('must be implemented in a subclass')
253
182
254 def exists(self, path):
183 def exists(self, path):
255 """Does a file or directory exist at the given path?
184 """Does a file or directory exist at the given path?
256
185
257 Like os.path.exists
186 Like os.path.exists
258
187
259 Parameters
188 Parameters
260 ----------
189 ----------
261 path : string
190 path : string
262 The relative path to the file's directory (with '/' as separator)
191 The relative path to the file's directory (with '/' as separator)
263
192
264 Returns
193 Returns
265 -------
194 -------
266 exists : bool
195 exists : bool
267 Whether the target exists.
196 Whether the target exists.
268 """
197 """
269 return self.file_exists(path) or self.dir_exists(path)
198 return self.file_exists(path) or self.dir_exists(path)
270
199
271 def get(self, path, content=True, type=None, format=None):
200 def get(self, path, content=True, type=None, format=None):
272 """Get the model of a file or directory with or without content."""
201 """Get the model of a file or directory with or without content."""
273 raise NotImplementedError('must be implemented in a subclass')
202 raise NotImplementedError('must be implemented in a subclass')
274
203
275 def save(self, model, path):
204 def save(self, model, path):
276 """Save the file or directory and return the model with no content.
205 """Save the file or directory and return the model with no content.
277
206
278 Save implementations should call self.run_pre_save_hook(model=model, path=path)
207 Save implementations should call self.run_pre_save_hook(model=model, path=path)
279 prior to writing any data.
208 prior to writing any data.
280 """
209 """
281 raise NotImplementedError('must be implemented in a subclass')
210 raise NotImplementedError('must be implemented in a subclass')
282
211
283 def delete_file(self, path):
212 def delete_file(self, path):
284 """Delete file or directory by path."""
213 """Delete file or directory by path."""
285 raise NotImplementedError('must be implemented in a subclass')
214 raise NotImplementedError('must be implemented in a subclass')
286
215
287 def rename_file(self, old_path, new_path):
216 def rename_file(self, old_path, new_path):
288 """Rename a file."""
217 """Rename a file."""
289 raise NotImplementedError('must be implemented in a subclass')
218 raise NotImplementedError('must be implemented in a subclass')
290
219
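To make the part 1 / part 2 split concrete, here is a toy in-memory backend (entirely hypothetical; storage and error handling are simplified, and checkpoints would still need a working CheckpointManager): it implements only the required methods above and inherits the defaults that follow.

class DictContentsManager(ContentsManager):
    """Toy backend keeping flat file models in a dict keyed by API path."""
    _store = {}  # class-level for brevity; a real backend would store per instance

    def dir_exists(self, path):
        return path.strip('/') == ''              # a single flat root "directory"

    def is_hidden(self, path):
        return False

    def file_exists(self, path=''):
        return path.strip('/') in self._store

    def get(self, path, content=True, type=None, format=None):
        model = dict(self._store[path.strip('/')])
        if not content:
            model['content'] = None
        return model

    def save(self, model, path):
        self.run_pre_save_hook(model=model, path=path)
        self._store[path.strip('/')] = dict(model, path=path.strip('/'))
        return self.get(path, content=False)

    def delete_file(self, path):
        self._store.pop(path.strip('/'), None)

    def rename_file(self, old_path, new_path):
        self._store[new_path.strip('/')] = self._store.pop(old_path.strip('/'))
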
291 # ContentsManager API part 2: methods that have usable default
220 # ContentsManager API part 2: methods that have usable default
292 # implementations, but can be overridden in subclasses.
221 # implementations, but can be overridden in subclasses.
293
222
294 def delete(self, path):
223 def delete(self, path):
295 """Delete a file/directory and any associated checkpoints."""
224 """Delete a file/directory and any associated checkpoints."""
296 self.delete_file(path)
225 self.delete_file(path)
297 self.checkpoint_manager.delete_all_checkpoints(path)
226 self.checkpoint_manager.delete_all_checkpoints(path)
298
227
299 def rename(self, old_path, new_path):
228 def rename(self, old_path, new_path):
300 """Rename a file and any checkpoints associated with that file."""
229 """Rename a file and any checkpoints associated with that file."""
301 self.rename_file(old_path, new_path)
230 self.rename_file(old_path, new_path)
302 self.checkpoint_manager.rename_all_checkpoints(old_path, new_path)
231 self.checkpoint_manager.rename_all_checkpoints(old_path, new_path)
303
232
304 def update(self, model, path):
233 def update(self, model, path):
305 """Update the file's path
234 """Update the file's path
306
235
307 For use in PATCH requests, to enable renaming a file without
236 For use in PATCH requests, to enable renaming a file without
308 re-uploading its contents. Only used for renaming at the moment.
237 re-uploading its contents. Only used for renaming at the moment.
309 """
238 """
310 path = path.strip('/')
239 path = path.strip('/')
311 new_path = model.get('path', path).strip('/')
240 new_path = model.get('path', path).strip('/')
312 if path != new_path:
241 if path != new_path:
313 self.rename(path, new_path)
242 self.rename(path, new_path)
314 model = self.get(new_path, content=False)
243 model = self.get(new_path, content=False)
315 return model
244 return model
316
245
317 def info_string(self):
246 def info_string(self):
318 return "Serving contents"
247 return "Serving contents"
319
248
320 def get_kernel_path(self, path, model=None):
249 def get_kernel_path(self, path, model=None):
321 """Return the API path for the kernel
250 """Return the API path for the kernel
322
251
323 KernelManagers can turn this value into a filesystem path,
252 KernelManagers can turn this value into a filesystem path,
324 or ignore it altogether.
253 or ignore it altogether.
325
254
326 The default value here will start kernels in the directory of the
255 The default value here will start kernels in the directory of the
327 notebook server. FileContentsManager overrides this to use the
256 notebook server. FileContentsManager overrides this to use the
328 directory containing the notebook.
257 directory containing the notebook.
329 """
258 """
330 return ''
259 return ''
331
260
332 def increment_filename(self, filename, path='', insert=''):
261 def increment_filename(self, filename, path='', insert=''):
333 """Increment a filename until it is unique.
262 """Increment a filename until it is unique.
334
263
335 Parameters
264 Parameters
336 ----------
265 ----------
337 filename : unicode
266 filename : unicode
338 The name of a file, including extension
267 The name of a file, including extension
339 path : unicode
268 path : unicode
340 The API path of the target's directory
269 The API path of the target's directory
341
270
342 Returns
271 Returns
343 -------
272 -------
344 name : unicode
273 name : unicode
345 A filename that is unique, based on the input filename.
274 A filename that is unique, based on the input filename.
346 """
275 """
347 path = path.strip('/')
276 path = path.strip('/')
348 basename, ext = os.path.splitext(filename)
277 basename, ext = os.path.splitext(filename)
349 for i in itertools.count():
278 for i in itertools.count():
350 if i:
279 if i:
351 insert_i = '{}{}'.format(insert, i)
280 insert_i = '{}{}'.format(insert, i)
352 else:
281 else:
353 insert_i = ''
282 insert_i = ''
354 name = u'{basename}{insert}{ext}'.format(basename=basename,
283 name = u'{basename}{insert}{ext}'.format(basename=basename,
355 insert=insert_i, ext=ext)
284 insert=insert_i, ext=ext)
356 if not self.exists(u'{}/{}'.format(path, name)):
285 if not self.exists(u'{}/{}'.format(path, name)):
357 break
286 break
358 return name
287 return name
359
288
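A short sketch of the incrementing behaviour (the demo subclass and its preexisting names are invented; ContentsManager is the base class defined in this module):

class _NameDemo(ContentsManager):
    """Only exists() is needed by increment_filename()."""
    _existing = {'Untitled.ipynb', 'foo/a.ipynb', 'foo/a-Copy1.ipynb'}

    def exists(self, path):
        return path.strip('/') in self._existing

cm = _NameDemo()
cm.increment_filename('Untitled.ipynb')                   # -> u'Untitled1.ipynb'
cm.increment_filename('a.ipynb', 'foo', insert='-Copy')   # -> u'a-Copy2.ipynb'
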
360 def validate_notebook_model(self, model):
289 def validate_notebook_model(self, model):
361 """Add failed-validation message to model"""
290 """Add failed-validation message to model"""
362 try:
291 try:
363 validate(model['content'])
292 validate(model['content'])
364 except ValidationError as e:
293 except ValidationError as e:
365 model['message'] = u'Notebook Validation failed: {}:\n{}'.format(
294 model['message'] = u'Notebook Validation failed: {}:\n{}'.format(
366 e.message, json.dumps(e.instance, indent=1, default=lambda obj: '<UNKNOWN>'),
295 e.message, json.dumps(e.instance, indent=1, default=lambda obj: '<UNKNOWN>'),
367 )
296 )
368 return model
297 return model
369
298
370 def new_untitled(self, path='', type='', ext=''):
299 def new_untitled(self, path='', type='', ext=''):
371 """Create a new untitled file or directory in path
300 """Create a new untitled file or directory in path
372
301
373 path must be a directory
302 path must be a directory
374
303
375 File extension can be specified.
304 File extension can be specified.
376
305
377 Use `new` to create files with a fully specified path (including filename).
306 Use `new` to create files with a fully specified path (including filename).
378 """
307 """
379 path = path.strip('/')
308 path = path.strip('/')
380 if not self.dir_exists(path):
309 if not self.dir_exists(path):
381 raise HTTPError(404, 'No such directory: %s' % path)
310 raise HTTPError(404, 'No such directory: %s' % path)
382
311
383 model = {}
312 model = {}
384 if type:
313 if type:
385 model['type'] = type
314 model['type'] = type
386
315
387 if ext == '.ipynb':
316 if ext == '.ipynb':
388 model.setdefault('type', 'notebook')
317 model.setdefault('type', 'notebook')
389 else:
318 else:
390 model.setdefault('type', 'file')
319 model.setdefault('type', 'file')
391
320
392 insert = ''
321 insert = ''
393 if model['type'] == 'directory':
322 if model['type'] == 'directory':
394 untitled = self.untitled_directory
323 untitled = self.untitled_directory
395 insert = ' '
324 insert = ' '
396 elif model['type'] == 'notebook':
325 elif model['type'] == 'notebook':
397 untitled = self.untitled_notebook
326 untitled = self.untitled_notebook
398 ext = '.ipynb'
327 ext = '.ipynb'
399 elif model['type'] == 'file':
328 elif model['type'] == 'file':
400 untitled = self.untitled_file
329 untitled = self.untitled_file
401 else:
330 else:
402 raise HTTPError(400, "Unexpected model type: %r" % model['type'])
331 raise HTTPError(400, "Unexpected model type: %r" % model['type'])
403
332
404 name = self.increment_filename(untitled + ext, path, insert=insert)
333 name = self.increment_filename(untitled + ext, path, insert=insert)
405 path = u'{0}/{1}'.format(path, name)
334 path = u'{0}/{1}'.format(path, name)
406 return self.new(model, path)
335 return self.new(model, path)
407
336
408 def new(self, model=None, path=''):
337 def new(self, model=None, path=''):
409 """Create a new file or directory and return its model with no content.
338 """Create a new file or directory and return its model with no content.
410
339
411 To create a new untitled entity in a directory, use `new_untitled`.
340 To create a new untitled entity in a directory, use `new_untitled`.
412 """
341 """
413 path = path.strip('/')
342 path = path.strip('/')
414 if model is None:
343 if model is None:
415 model = {}
344 model = {}
416
345
417 if path.endswith('.ipynb'):
346 if path.endswith('.ipynb'):
418 model.setdefault('type', 'notebook')
347 model.setdefault('type', 'notebook')
419 else:
348 else:
420 model.setdefault('type', 'file')
349 model.setdefault('type', 'file')
421
350
422 # no content, not a directory, so fill out new-file model
351 # no content, not a directory, so fill out new-file model
423 if 'content' not in model and model['type'] != 'directory':
352 if 'content' not in model and model['type'] != 'directory':
424 if model['type'] == 'notebook':
353 if model['type'] == 'notebook':
425 model['content'] = new_notebook()
354 model['content'] = new_notebook()
426 model['format'] = 'json'
355 model['format'] = 'json'
427 else:
356 else:
428 model['content'] = ''
357 model['content'] = ''
429 model['type'] = 'file'
358 model['type'] = 'file'
430 model['format'] = 'text'
359 model['format'] = 'text'
431
360
432 model = self.save(model, path)
361 model = self.save(model, path)
433 return model
362 return model
434
363
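A sketch of creating content programmatically with the file-based manager (the import path is assumed from this changeset's layout, and a writable scratch directory is required):

from IPython.utils.tempdir import TemporaryDirectory
from IPython.html.services.contents.filemanager import FileContentsManager

with TemporaryDirectory() as td:
    cm = FileContentsManager(root_dir=td)
    nb = cm.new_untitled(type='notebook')   # picks the next free 'Untitled*.ipynb' name
    txt = cm.new_untitled(ext='.txt')       # no type given: '.txt' implies a plain file
    print(nb['path'], txt['path'])          # e.g. Untitled.ipynb untitled.txt

copy() below builds on the same naming machinery, producing names like 'a-Copy1.ipynb' when the copy lands in a directory where the source name is already taken.
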
435 def copy(self, from_path, to_path=None):
364 def copy(self, from_path, to_path=None):
436 """Copy an existing file and return its new model.
365 """Copy an existing file and return its new model.
437
366
438 If to_path is not specified, it will be the parent directory of from_path.
367 If to_path is not specified, it will be the parent directory of from_path.
439 If to_path is a directory, the filename will be incremented as `from_path-Copy#.ext`.
368 If to_path is a directory, the filename will be incremented as `from_path-Copy#.ext`.
440
369
441 from_path must be a full path to a file.
370 from_path must be a full path to a file.
442 """
371 """
443 path = from_path.strip('/')
372 path = from_path.strip('/')
444 if to_path is not None:
373 if to_path is not None:
445 to_path = to_path.strip('/')
374 to_path = to_path.strip('/')
446
375
447 if '/' in path:
376 if '/' in path:
448 from_dir, from_name = path.rsplit('/', 1)
377 from_dir, from_name = path.rsplit('/', 1)
449 else:
378 else:
450 from_dir = ''
379 from_dir = ''
451 from_name = path
380 from_name = path
452
381
453 model = self.get(path)
382 model = self.get(path)
454 model.pop('path', None)
383 model.pop('path', None)
455 model.pop('name', None)
384 model.pop('name', None)
456 if model['type'] == 'directory':
385 if model['type'] == 'directory':
457 raise HTTPError(400, "Can't copy directories")
386 raise HTTPError(400, "Can't copy directories")
458
387
459 if to_path is None:
388 if to_path is None:
460 to_path = from_dir
389 to_path = from_dir
461 if self.dir_exists(to_path):
390 if self.dir_exists(to_path):
462 name = copy_pat.sub(u'.', from_name)
391 name = copy_pat.sub(u'.', from_name)
463 to_name = self.increment_filename(name, to_path, insert='-Copy')
392 to_name = self.increment_filename(name, to_path, insert='-Copy')
464 to_path = u'{0}/{1}'.format(to_path, to_name)
393 to_path = u'{0}/{1}'.format(to_path, to_name)
465
394
466 model = self.save(model, to_path)
395 model = self.save(model, to_path)
467 return model
396 return model
468
397
469 def log_info(self):
398 def log_info(self):
470 self.log.info(self.info_string())
399 self.log.info(self.info_string())
471
400
472 def trust_notebook(self, path):
401 def trust_notebook(self, path):
473 """Explicitly trust a notebook
402 """Explicitly trust a notebook
474
403
475 Parameters
404 Parameters
476 ----------
405 ----------
477 path : string
406 path : string
478 The path of a notebook
407 The path of a notebook
479 """
408 """
480 model = self.get(path)
409 model = self.get(path)
481 nb = model['content']
410 nb = model['content']
482 self.log.warn("Trusting notebook %s", path)
411 self.log.warn("Trusting notebook %s", path)
483 self.notary.mark_cells(nb, True)
412 self.notary.mark_cells(nb, True)
484 self.save(model, path)
413 self.save(model, path)
485
414
486 def check_and_sign(self, nb, path=''):
415 def check_and_sign(self, nb, path=''):
487 """Check for trusted cells, and sign the notebook.
416 """Check for trusted cells, and sign the notebook.
488
417
489 Called as a part of saving notebooks.
418 Called as a part of saving notebooks.
490
419
491 Parameters
420 Parameters
492 ----------
421 ----------
493 nb : dict
422 nb : dict
494 The notebook dict
423 The notebook dict
495 path : string
424 path : string
496 The notebook's path (for logging)
425 The notebook's path (for logging)
497 """
426 """
498 if self.notary.check_cells(nb):
427 if self.notary.check_cells(nb):
499 self.notary.sign(nb)
428 self.notary.sign(nb)
500 else:
429 else:
501 self.log.warn("Saving untrusted notebook %s", path)
430 self.log.warn("Saving untrusted notebook %s", path)
502
431
503 def mark_trusted_cells(self, nb, path=''):
432 def mark_trusted_cells(self, nb, path=''):
504 """Mark cells as trusted if the notebook signature matches.
433 """Mark cells as trusted if the notebook signature matches.
505
434
506 Called as a part of loading notebooks.
435 Called as a part of loading notebooks.
507
436
508 Parameters
437 Parameters
509 ----------
438 ----------
510 nb : dict
439 nb : dict
511 The notebook object (in current nbformat)
440 The notebook object (in current nbformat)
512 path : string
441 path : string
513 The notebook's path (for logging)
442 The notebook's path (for logging)
514 """
443 """
515 trusted = self.notary.check_signature(nb)
444 trusted = self.notary.check_signature(nb)
516 if not trusted:
445 if not trusted:
517 self.log.warn("Notebook %s is not trusted", path)
446 self.log.warn("Notebook %s is not trusted", path)
518 self.notary.mark_cells(nb, trusted)
447 self.notary.mark_cells(nb, trusted)
519
448
520 def should_list(self, name):
449 def should_list(self, name):
521 """Should this file/directory name be displayed in a listing?"""
450 """Should this file/directory name be displayed in a listing?"""
522 return not any(fnmatch(name, glob) for glob in self.hide_globs)
451 return not any(fnmatch(name, glob) for glob in self.hide_globs)
523
452
524 # Part 3: Checkpoints API
453 # Part 3: Checkpoints API
525 def create_checkpoint(self, path):
454 def create_checkpoint(self, path):
526 """Create a checkpoint."""
455 """Create a checkpoint."""
527 return self.checkpoint_manager.create_checkpoint(self, path)
456 return self.checkpoint_manager.create_checkpoint(self, path)
528
457
529 def restore_checkpoint(self, checkpoint_id, path):
458 def restore_checkpoint(self, checkpoint_id, path):
530 """
459 """
531 Restore a checkpoint.
460 Restore a checkpoint.
532 """
461 """
533 self.checkpoint_manager.restore_checkpoint(self, checkpoint_id, path)
462 self.checkpoint_manager.restore_checkpoint(self, checkpoint_id, path)
534
463
535 def list_checkpoints(self, path):
464 def list_checkpoints(self, path):
536 return self.checkpoint_manager.list_checkpoints(path)
465 return self.checkpoint_manager.list_checkpoints(path)
537
466
538 def delete_checkpoint(self, checkpoint_id, path):
467 def delete_checkpoint(self, checkpoint_id, path):
539 return self.checkpoint_manager.delete_checkpoint(checkpoint_id, path)
468 return self.checkpoint_manager.delete_checkpoint(checkpoint_id, path)
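A round-trip sketch of this delegation, assuming the FileContentsManager from this changeset with its default file-based checkpoint backend (import paths and the notebook name are assumptions):

from IPython.utils.tempdir import TemporaryDirectory
from IPython.html.services.contents.filemanager import FileContentsManager

with TemporaryDirectory() as td:
    cm = FileContentsManager(root_dir=td)
    cm.new(path='report.ipynb')                        # empty notebook to snapshot
    cp = cm.create_checkpoint('report.ipynb')          # forwarded to checkpoint_manager
    assert any(c['id'] == cp['id'] for c in cm.list_checkpoints('report.ipynb'))
    cm.restore_checkpoint(cp['id'], 'report.ipynb')    # restore the snapshot
    cm.delete_checkpoint(cp['id'], 'report.ipynb')
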
@@ -1,638 +1,629 b''
1 # coding: utf-8
1 # coding: utf-8
2 """Test the contents webservice API."""
2 """Test the contents webservice API."""
3
3
4 import base64
4 import base64
5 from contextlib import contextmanager
5 from contextlib import contextmanager
6 import io
6 import io
7 import json
7 import json
8 import os
8 import os
9 import shutil
9 import shutil
10 from unicodedata import normalize
10 from unicodedata import normalize
11
11
12 pjoin = os.path.join
12 pjoin = os.path.join
13
13
14 import requests
14 import requests
15
15
16 from ..filecheckpoints import GenericFileCheckpointManager
17
18 from IPython.config import Config
16 from IPython.html.utils import url_path_join, url_escape, to_os_path
19 from IPython.html.utils import url_path_join, url_escape, to_os_path
17 from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error
20 from IPython.html.tests.launchnotebook import NotebookTestBase, assert_http_error
18 from IPython.nbformat import read, write, from_dict
21 from IPython.nbformat import read, write, from_dict
19 from IPython.nbformat.v4 import (
22 from IPython.nbformat.v4 import (
20 new_notebook, new_markdown_cell,
23 new_notebook, new_markdown_cell,
21 )
24 )
22 from IPython.nbformat import v2
25 from IPython.nbformat import v2
23 from IPython.utils import py3compat
26 from IPython.utils import py3compat
24 from IPython.utils.data import uniq_stable
27 from IPython.utils.data import uniq_stable
25 from IPython.utils.tempdir import TemporaryDirectory
28 from IPython.utils.tempdir import TemporaryDirectory
26
29
27
30
28 def notebooks_only(dir_model):
31 def notebooks_only(dir_model):
29 return [nb for nb in dir_model['content'] if nb['type']=='notebook']
32 return [nb for nb in dir_model['content'] if nb['type']=='notebook']
30
33
31 def dirs_only(dir_model):
34 def dirs_only(dir_model):
32 return [x for x in dir_model['content'] if x['type']=='directory']
35 return [x for x in dir_model['content'] if x['type']=='directory']
33
36
34
37
35 class API(object):
38 class API(object):
36 """Wrapper for contents API calls."""
39 """Wrapper for contents API calls."""
37 def __init__(self, base_url):
40 def __init__(self, base_url):
38 self.base_url = base_url
41 self.base_url = base_url
39
42
40 def _req(self, verb, path, body=None, params=None):
43 def _req(self, verb, path, body=None, params=None):
41 response = requests.request(verb,
44 response = requests.request(verb,
42 url_path_join(self.base_url, 'api/contents', path),
45 url_path_join(self.base_url, 'api/contents', path),
43 data=body, params=params,
46 data=body, params=params,
44 )
47 )
45 response.raise_for_status()
48 response.raise_for_status()
46 return response
49 return response
47
50
48 def list(self, path='/'):
51 def list(self, path='/'):
49 return self._req('GET', path)
52 return self._req('GET', path)
50
53
51 def read(self, path, type=None, format=None):
54 def read(self, path, type=None, format=None):
52 params = {}
55 params = {}
53 if type is not None:
56 if type is not None:
54 params['type'] = type
57 params['type'] = type
55 if format is not None:
58 if format is not None:
56 params['format'] = format
59 params['format'] = format
57 return self._req('GET', path, params=params)
60 return self._req('GET', path, params=params)
58
61
59 def create_untitled(self, path='/', ext='.ipynb'):
62 def create_untitled(self, path='/', ext='.ipynb'):
60 body = None
63 body = None
61 if ext:
64 if ext:
62 body = json.dumps({'ext': ext})
65 body = json.dumps({'ext': ext})
63 return self._req('POST', path, body)
66 return self._req('POST', path, body)
64
67
65 def mkdir_untitled(self, path='/'):
68 def mkdir_untitled(self, path='/'):
66 return self._req('POST', path, json.dumps({'type': 'directory'}))
69 return self._req('POST', path, json.dumps({'type': 'directory'}))
67
70
68 def copy(self, copy_from, path='/'):
71 def copy(self, copy_from, path='/'):
69 body = json.dumps({'copy_from':copy_from})
72 body = json.dumps({'copy_from':copy_from})
70 return self._req('POST', path, body)
73 return self._req('POST', path, body)
71
74
72 def create(self, path='/'):
75 def create(self, path='/'):
73 return self._req('PUT', path)
76 return self._req('PUT', path)
74
77
75 def upload(self, path, body):
78 def upload(self, path, body):
76 return self._req('PUT', path, body)
79 return self._req('PUT', path, body)
77
80
78 def mkdir(self, path='/'):
81 def mkdir(self, path='/'):
79 return self._req('PUT', path, json.dumps({'type': 'directory'}))
82 return self._req('PUT', path, json.dumps({'type': 'directory'}))
80
83
81 def copy_put(self, copy_from, path='/'):
84 def copy_put(self, copy_from, path='/'):
82 body = json.dumps({'copy_from':copy_from})
85 body = json.dumps({'copy_from':copy_from})
83 return self._req('PUT', path, body)
86 return self._req('PUT', path, body)
84
87
85 def save(self, path, body):
88 def save(self, path, body):
86 return self._req('PUT', path, body)
89 return self._req('PUT', path, body)
87
90
88 def delete(self, path='/'):
91 def delete(self, path='/'):
89 return self._req('DELETE', path)
92 return self._req('DELETE', path)
90
93
91 def rename(self, path, new_path):
94 def rename(self, path, new_path):
92 body = json.dumps({'path': new_path})
95 body = json.dumps({'path': new_path})
93 return self._req('PATCH', path, body)
96 return self._req('PATCH', path, body)
94
97
95 def get_checkpoints(self, path):
98 def get_checkpoints(self, path):
96 return self._req('GET', url_path_join(path, 'checkpoints'))
99 return self._req('GET', url_path_join(path, 'checkpoints'))
97
100
98 def new_checkpoint(self, path):
101 def new_checkpoint(self, path):
99 return self._req('POST', url_path_join(path, 'checkpoints'))
102 return self._req('POST', url_path_join(path, 'checkpoints'))
100
103
101 def restore_checkpoint(self, path, checkpoint_id):
104 def restore_checkpoint(self, path, checkpoint_id):
102 return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id))
105 return self._req('POST', url_path_join(path, 'checkpoints', checkpoint_id))
103
106
104 def delete_checkpoint(self, path, checkpoint_id):
107 def delete_checkpoint(self, path, checkpoint_id):
105 return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id))
108 return self._req('DELETE', url_path_join(path, 'checkpoints', checkpoint_id))
106
109
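For reference, the raw HTTP requests this wrapper issues look roughly like the following (the server URL is an assumption, a notebook server must already be running, and error handling is omitted):

import json
import requests

base = 'http://127.0.0.1:8888/api/contents'
r = requests.post(base + '/', data=json.dumps({'ext': '.ipynb'}))        # new untitled notebook
path = r.json()['path']
requests.patch(base + '/' + path, data=json.dumps({'path': 'z.ipynb'}))  # rename via PATCH
requests.post(base + '/z.ipynb/checkpoints')                             # create a checkpoint
requests.delete(base + '/z.ipynb')                                       # delete the file
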
107 class APITest(NotebookTestBase):
110 class APITest(NotebookTestBase):
108 """Test the kernels web service API"""
111 """Test the kernels web service API"""
109 dirs_nbs = [('', 'inroot'),
112 dirs_nbs = [('', 'inroot'),
110 ('Directory with spaces in', 'inspace'),
113 ('Directory with spaces in', 'inspace'),
111 (u'unicodΓ©', 'innonascii'),
114 (u'unicodΓ©', 'innonascii'),
112 ('foo', 'a'),
115 ('foo', 'a'),
113 ('foo', 'b'),
116 ('foo', 'b'),
114 ('foo', 'name with spaces'),
117 ('foo', 'name with spaces'),
115 ('foo', u'unicodΓ©'),
118 ('foo', u'unicodΓ©'),
116 ('foo/bar', 'baz'),
119 ('foo/bar', 'baz'),
117 ('ordering', 'A'),
120 ('ordering', 'A'),
118 ('ordering', 'b'),
121 ('ordering', 'b'),
119 ('ordering', 'C'),
122 ('ordering', 'C'),
120 (u'Γ₯ b', u'Γ§ d'),
123 (u'Γ₯ b', u'Γ§ d'),
121 ]
124 ]
122 hidden_dirs = ['.hidden', '__pycache__']
125 hidden_dirs = ['.hidden', '__pycache__']
123
126
124 # Don't include root dir.
127 # Don't include root dir.
125 dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]])
128 dirs = uniq_stable([py3compat.cast_unicode(d) for (d,n) in dirs_nbs[1:]])
126 top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs}
129 top_level_dirs = {normalize('NFC', d.split('/')[0]) for d in dirs}
127
130
128 @staticmethod
131 @staticmethod
129 def _blob_for_name(name):
132 def _blob_for_name(name):
130 return name.encode('utf-8') + b'\xFF'
133 return name.encode('utf-8') + b'\xFF'
131
134
132 @staticmethod
135 @staticmethod
133 def _txt_for_name(name):
136 def _txt_for_name(name):
134 return u'%s text file' % name
137 return u'%s text file' % name
135
138
136 def to_os_path(self, api_path):
139 def to_os_path(self, api_path):
137 return to_os_path(api_path, root=self.notebook_dir.name)
140 return to_os_path(api_path, root=self.notebook_dir.name)
138
141
139 def make_dir(self, api_path):
142 def make_dir(self, api_path):
140 """Create a directory at api_path"""
143 """Create a directory at api_path"""
141 os_path = self.to_os_path(api_path)
144 os_path = self.to_os_path(api_path)
142 try:
145 try:
143 os.makedirs(os_path)
146 os.makedirs(os_path)
144 except OSError:
147 except OSError:
145 print("Directory already exists: %r" % os_path)
148 print("Directory already exists: %r" % os_path)
146
149
147 def make_txt(self, api_path, txt):
150 def make_txt(self, api_path, txt):
148 """Make a text file at a given api_path"""
151 """Make a text file at a given api_path"""
149 os_path = self.to_os_path(api_path)
152 os_path = self.to_os_path(api_path)
150 with io.open(os_path, 'w', encoding='utf-8') as f:
153 with io.open(os_path, 'w', encoding='utf-8') as f:
151 f.write(txt)
154 f.write(txt)
152
155
153 def make_blob(self, api_path, blob):
156 def make_blob(self, api_path, blob):
154 """Make a binary file at a given api_path"""
157 """Make a binary file at a given api_path"""
155 os_path = self.to_os_path(api_path)
158 os_path = self.to_os_path(api_path)
156 with io.open(os_path, 'wb') as f:
159 with io.open(os_path, 'wb') as f:
157 f.write(blob)
160 f.write(blob)
158
161
159 def make_nb(self, api_path, nb):
162 def make_nb(self, api_path, nb):
160 """Make a notebook file at a given api_path"""
163 """Make a notebook file at a given api_path"""
161 os_path = self.to_os_path(api_path)
164 os_path = self.to_os_path(api_path)
162
165
163 with io.open(os_path, 'w', encoding='utf-8') as f:
166 with io.open(os_path, 'w', encoding='utf-8') as f:
164 write(nb, f, version=4)
167 write(nb, f, version=4)
165
168
166 def delete_dir(self, api_path):
169 def delete_dir(self, api_path):
167 """Delete a directory at api_path, removing any contents."""
170 """Delete a directory at api_path, removing any contents."""
168 os_path = self.to_os_path(api_path)
171 os_path = self.to_os_path(api_path)
169 shutil.rmtree(os_path, ignore_errors=True)
172 shutil.rmtree(os_path, ignore_errors=True)
170
173
171 def delete_file(self, api_path):
174 def delete_file(self, api_path):
172 """Delete a file at the given path if it exists."""
175 """Delete a file at the given path if it exists."""
173 if self.isfile(api_path):
176 if self.isfile(api_path):
174 os.unlink(self.to_os_path(api_path))
177 os.unlink(self.to_os_path(api_path))
175
178
176 def isfile(self, api_path):
179 def isfile(self, api_path):
177 return os.path.isfile(self.to_os_path(api_path))
180 return os.path.isfile(self.to_os_path(api_path))
178
181
179 def isdir(self, api_path):
182 def isdir(self, api_path):
180 return os.path.isdir(self.to_os_path(api_path))
183 return os.path.isdir(self.to_os_path(api_path))
181
184
182 def setUp(self):
185 def setUp(self):
183
186
184 for d in (self.dirs + self.hidden_dirs):
187 for d in (self.dirs + self.hidden_dirs):
185 self.make_dir(d)
188 self.make_dir(d)
186
189
187 for d, name in self.dirs_nbs:
190 for d, name in self.dirs_nbs:
188 # create a notebook
191 # create a notebook
189 nb = new_notebook()
192 nb = new_notebook()
190 self.make_nb(u'{}/{}.ipynb'.format(d, name), nb)
193 self.make_nb(u'{}/{}.ipynb'.format(d, name), nb)
191
194
192 # create a text file
195 # create a text file
193 txt = self._txt_for_name(name)
196 txt = self._txt_for_name(name)
194 self.make_txt(u'{}/{}.txt'.format(d, name), txt)
197 self.make_txt(u'{}/{}.txt'.format(d, name), txt)
195
198
196 # create a binary file
199 # create a binary file
197 blob = self._blob_for_name(name)
200 blob = self._blob_for_name(name)
198 self.make_blob(u'{}/{}.blob'.format(d, name), blob)
201 self.make_blob(u'{}/{}.blob'.format(d, name), blob)
199
202
200 self.api = API(self.base_url())
203 self.api = API(self.base_url())
201
204
202 def tearDown(self):
205 def tearDown(self):
203 for dname in (list(self.top_level_dirs) + self.hidden_dirs):
206 for dname in (list(self.top_level_dirs) + self.hidden_dirs):
204 self.delete_dir(dname)
207 self.delete_dir(dname)
205 self.delete_file('inroot.ipynb')
208 self.delete_file('inroot.ipynb')
206
209
207 def test_list_notebooks(self):
210 def test_list_notebooks(self):
208 nbs = notebooks_only(self.api.list().json())
211 nbs = notebooks_only(self.api.list().json())
209 self.assertEqual(len(nbs), 1)
212 self.assertEqual(len(nbs), 1)
210 self.assertEqual(nbs[0]['name'], 'inroot.ipynb')
213 self.assertEqual(nbs[0]['name'], 'inroot.ipynb')
211
214
212 nbs = notebooks_only(self.api.list('/Directory with spaces in/').json())
215 nbs = notebooks_only(self.api.list('/Directory with spaces in/').json())
213 self.assertEqual(len(nbs), 1)
216 self.assertEqual(len(nbs), 1)
214 self.assertEqual(nbs[0]['name'], 'inspace.ipynb')
217 self.assertEqual(nbs[0]['name'], 'inspace.ipynb')
215
218
216 nbs = notebooks_only(self.api.list(u'/unicodΓ©/').json())
219 nbs = notebooks_only(self.api.list(u'/unicodΓ©/').json())
217 self.assertEqual(len(nbs), 1)
220 self.assertEqual(len(nbs), 1)
218 self.assertEqual(nbs[0]['name'], 'innonascii.ipynb')
221 self.assertEqual(nbs[0]['name'], 'innonascii.ipynb')
219 self.assertEqual(nbs[0]['path'], u'unicodΓ©/innonascii.ipynb')
222 self.assertEqual(nbs[0]['path'], u'unicodΓ©/innonascii.ipynb')
220
223
221 nbs = notebooks_only(self.api.list('/foo/bar/').json())
224 nbs = notebooks_only(self.api.list('/foo/bar/').json())
222 self.assertEqual(len(nbs), 1)
225 self.assertEqual(len(nbs), 1)
223 self.assertEqual(nbs[0]['name'], 'baz.ipynb')
226 self.assertEqual(nbs[0]['name'], 'baz.ipynb')
224 self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb')
227 self.assertEqual(nbs[0]['path'], 'foo/bar/baz.ipynb')
225
228
226 nbs = notebooks_only(self.api.list('foo').json())
229 nbs = notebooks_only(self.api.list('foo').json())
227 self.assertEqual(len(nbs), 4)
230 self.assertEqual(len(nbs), 4)
228 nbnames = { normalize('NFC', n['name']) for n in nbs }
231 nbnames = { normalize('NFC', n['name']) for n in nbs }
229 expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodΓ©.ipynb']
232 expected = [ u'a.ipynb', u'b.ipynb', u'name with spaces.ipynb', u'unicodΓ©.ipynb']
230 expected = { normalize('NFC', name) for name in expected }
233 expected = { normalize('NFC', name) for name in expected }
231 self.assertEqual(nbnames, expected)
234 self.assertEqual(nbnames, expected)
232
235
233 nbs = notebooks_only(self.api.list('ordering').json())
236 nbs = notebooks_only(self.api.list('ordering').json())
234 nbnames = [n['name'] for n in nbs]
237 nbnames = [n['name'] for n in nbs]
235 expected = ['A.ipynb', 'b.ipynb', 'C.ipynb']
238 expected = ['A.ipynb', 'b.ipynb', 'C.ipynb']
236 self.assertEqual(nbnames, expected)
239 self.assertEqual(nbnames, expected)
237
240
238 def test_list_dirs(self):
241 def test_list_dirs(self):
239 dirs = dirs_only(self.api.list().json())
242 dirs = dirs_only(self.api.list().json())
240 dir_names = {normalize('NFC', d['name']) for d in dirs}
243 dir_names = {normalize('NFC', d['name']) for d in dirs}
241 self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs
244 self.assertEqual(dir_names, self.top_level_dirs) # Excluding hidden dirs
242
245
243 def test_list_nonexistant_dir(self):
246 def test_list_nonexistant_dir(self):
244 with assert_http_error(404):
247 with assert_http_error(404):
245 self.api.list('nonexistant')
248 self.api.list('nonexistant')
246
249
247 def test_get_nb_contents(self):
250 def test_get_nb_contents(self):
248 for d, name in self.dirs_nbs:
251 for d, name in self.dirs_nbs:
249 path = url_path_join(d, name + '.ipynb')
252 path = url_path_join(d, name + '.ipynb')
250 nb = self.api.read(path).json()
253 nb = self.api.read(path).json()
251 self.assertEqual(nb['name'], u'%s.ipynb' % name)
254 self.assertEqual(nb['name'], u'%s.ipynb' % name)
252 self.assertEqual(nb['path'], path)
255 self.assertEqual(nb['path'], path)
253 self.assertEqual(nb['type'], 'notebook')
256 self.assertEqual(nb['type'], 'notebook')
254 self.assertIn('content', nb)
257 self.assertIn('content', nb)
255 self.assertEqual(nb['format'], 'json')
258 self.assertEqual(nb['format'], 'json')
256 self.assertIn('content', nb)
259 self.assertIn('content', nb)
257 self.assertIn('metadata', nb['content'])
260 self.assertIn('metadata', nb['content'])
258 self.assertIsInstance(nb['content']['metadata'], dict)
261 self.assertIsInstance(nb['content']['metadata'], dict)
259
262
260 def test_get_contents_no_such_file(self):
263 def test_get_contents_no_such_file(self):
261 # Name that doesn't exist - should be a 404
264 # Name that doesn't exist - should be a 404
262 with assert_http_error(404):
265 with assert_http_error(404):
263 self.api.read('foo/q.ipynb')
266 self.api.read('foo/q.ipynb')
264
267
265 def test_get_text_file_contents(self):
268 def test_get_text_file_contents(self):
266 for d, name in self.dirs_nbs:
269 for d, name in self.dirs_nbs:
267 path = url_path_join(d, name + '.txt')
270 path = url_path_join(d, name + '.txt')
268 model = self.api.read(path).json()
271 model = self.api.read(path).json()
269 self.assertEqual(model['name'], u'%s.txt' % name)
272 self.assertEqual(model['name'], u'%s.txt' % name)
270 self.assertEqual(model['path'], path)
273 self.assertEqual(model['path'], path)
271 self.assertIn('content', model)
274 self.assertIn('content', model)
272 self.assertEqual(model['format'], 'text')
275 self.assertEqual(model['format'], 'text')
273 self.assertEqual(model['type'], 'file')
276 self.assertEqual(model['type'], 'file')
274 self.assertEqual(model['content'], self._txt_for_name(name))
277 self.assertEqual(model['content'], self._txt_for_name(name))
275
278
276 # Name that doesn't exist - should be a 404
279 # Name that doesn't exist - should be a 404
277 with assert_http_error(404):
280 with assert_http_error(404):
278 self.api.read('foo/q.txt')
281 self.api.read('foo/q.txt')
279
282
280 # Specifying format=text should fail on a non-UTF-8 file
283 # Specifying format=text should fail on a non-UTF-8 file
281 with assert_http_error(400):
284 with assert_http_error(400):
282 self.api.read('foo/bar/baz.blob', type='file', format='text')
285 self.api.read('foo/bar/baz.blob', type='file', format='text')
283
286
284 def test_get_binary_file_contents(self):
287 def test_get_binary_file_contents(self):
285 for d, name in self.dirs_nbs:
288 for d, name in self.dirs_nbs:
286 path = url_path_join(d, name + '.blob')
289 path = url_path_join(d, name + '.blob')
287 model = self.api.read(path).json()
290 model = self.api.read(path).json()
288 self.assertEqual(model['name'], u'%s.blob' % name)
291 self.assertEqual(model['name'], u'%s.blob' % name)
289 self.assertEqual(model['path'], path)
292 self.assertEqual(model['path'], path)
290 self.assertIn('content', model)
293 self.assertIn('content', model)
291 self.assertEqual(model['format'], 'base64')
294 self.assertEqual(model['format'], 'base64')
292 self.assertEqual(model['type'], 'file')
295 self.assertEqual(model['type'], 'file')
293 self.assertEqual(
296 self.assertEqual(
294 base64.decodestring(model['content'].encode('ascii')),
297 base64.decodestring(model['content'].encode('ascii')),
295 self._blob_for_name(name),
298 self._blob_for_name(name),
296 )
299 )
297
300
298 # Name that doesn't exist - should be a 404
301 # Name that doesn't exist - should be a 404
299 with assert_http_error(404):
302 with assert_http_error(404):
300 self.api.read('foo/q.txt')
303 self.api.read('foo/q.txt')
301
304
302 def test_get_bad_type(self):
305 def test_get_bad_type(self):
303 with assert_http_error(400):
306 with assert_http_error(400):
304 self.api.read(u'unicodΓ©', type='file') # this is a directory
307 self.api.read(u'unicodΓ©', type='file') # this is a directory
305
308
306 with assert_http_error(400):
309 with assert_http_error(400):
307 self.api.read(u'unicodΓ©/innonascii.ipynb', type='directory')
310 self.api.read(u'unicodΓ©/innonascii.ipynb', type='directory')
308
311
309 def _check_created(self, resp, path, type='notebook'):
312 def _check_created(self, resp, path, type='notebook'):
310 self.assertEqual(resp.status_code, 201)
313 self.assertEqual(resp.status_code, 201)
311 location_header = py3compat.str_to_unicode(resp.headers['Location'])
314 location_header = py3compat.str_to_unicode(resp.headers['Location'])
312 self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path)))
315 self.assertEqual(location_header, url_escape(url_path_join(u'/api/contents', path)))
313 rjson = resp.json()
316 rjson = resp.json()
314 self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1])
317 self.assertEqual(rjson['name'], path.rsplit('/', 1)[-1])
315 self.assertEqual(rjson['path'], path)
318 self.assertEqual(rjson['path'], path)
316 self.assertEqual(rjson['type'], type)
319 self.assertEqual(rjson['type'], type)
317 isright = self.isdir if type == 'directory' else self.isfile
320 isright = self.isdir if type == 'directory' else self.isfile
318 assert isright(path)
321 assert isright(path)
319
322
320 def test_create_untitled(self):
323 def test_create_untitled(self):
321 resp = self.api.create_untitled(path=u'Γ₯ b')
324 resp = self.api.create_untitled(path=u'Γ₯ b')
322 self._check_created(resp, u'Γ₯ b/Untitled.ipynb')
325 self._check_created(resp, u'Γ₯ b/Untitled.ipynb')
323
326
324 # Second time
327 # Second time
325 resp = self.api.create_untitled(path=u'Γ₯ b')
328 resp = self.api.create_untitled(path=u'Γ₯ b')
326 self._check_created(resp, u'Γ₯ b/Untitled1.ipynb')
329 self._check_created(resp, u'Γ₯ b/Untitled1.ipynb')
327
330
328 # And two directories down
331 # And two directories down
329 resp = self.api.create_untitled(path='foo/bar')
332 resp = self.api.create_untitled(path='foo/bar')
330 self._check_created(resp, 'foo/bar/Untitled.ipynb')
333 self._check_created(resp, 'foo/bar/Untitled.ipynb')
331
334
332 def test_create_untitled_txt(self):
335 def test_create_untitled_txt(self):
333 resp = self.api.create_untitled(path='foo/bar', ext='.txt')
336 resp = self.api.create_untitled(path='foo/bar', ext='.txt')
334 self._check_created(resp, 'foo/bar/untitled.txt', type='file')
337 self._check_created(resp, 'foo/bar/untitled.txt', type='file')
335
338
336 resp = self.api.read(path='foo/bar/untitled.txt')
339 resp = self.api.read(path='foo/bar/untitled.txt')
337 model = resp.json()
340 model = resp.json()
338 self.assertEqual(model['type'], 'file')
341 self.assertEqual(model['type'], 'file')
339 self.assertEqual(model['format'], 'text')
342 self.assertEqual(model['format'], 'text')
340 self.assertEqual(model['content'], '')
343 self.assertEqual(model['content'], '')
341
344
342 def test_upload(self):
345 def test_upload(self):
343 nb = new_notebook()
346 nb = new_notebook()
344 nbmodel = {'content': nb, 'type': 'notebook'}
347 nbmodel = {'content': nb, 'type': 'notebook'}
345 path = u'Γ₯ b/Upload tΓ©st.ipynb'
348 path = u'Γ₯ b/Upload tΓ©st.ipynb'
346 resp = self.api.upload(path, body=json.dumps(nbmodel))
349 resp = self.api.upload(path, body=json.dumps(nbmodel))
347 self._check_created(resp, path)
350 self._check_created(resp, path)
348
351
349 def test_mkdir_untitled(self):
352 def test_mkdir_untitled(self):
350 resp = self.api.mkdir_untitled(path=u'Γ₯ b')
353 resp = self.api.mkdir_untitled(path=u'Γ₯ b')
351 self._check_created(resp, u'Γ₯ b/Untitled Folder', type='directory')
354 self._check_created(resp, u'Γ₯ b/Untitled Folder', type='directory')
352
355
353 # Second time
356 # Second time
354 resp = self.api.mkdir_untitled(path=u'Γ₯ b')
357 resp = self.api.mkdir_untitled(path=u'Γ₯ b')
355 self._check_created(resp, u'Γ₯ b/Untitled Folder 1', type='directory')
358 self._check_created(resp, u'Γ₯ b/Untitled Folder 1', type='directory')
356
359
357 # And two directories down
360 # And two directories down
358 resp = self.api.mkdir_untitled(path='foo/bar')
361 resp = self.api.mkdir_untitled(path='foo/bar')
359 self._check_created(resp, 'foo/bar/Untitled Folder', type='directory')
362 self._check_created(resp, 'foo/bar/Untitled Folder', type='directory')
360
363
361 def test_mkdir(self):
364 def test_mkdir(self):
362 path = u'Γ₯ b/New βˆ‚ir'
365 path = u'Γ₯ b/New βˆ‚ir'
363 resp = self.api.mkdir(path)
366 resp = self.api.mkdir(path)
364 self._check_created(resp, path, type='directory')
367 self._check_created(resp, path, type='directory')
365
368
366 def test_mkdir_hidden_400(self):
369 def test_mkdir_hidden_400(self):
367 with assert_http_error(400):
370 with assert_http_error(400):
368 resp = self.api.mkdir(u'Γ₯ b/.hidden')
371 resp = self.api.mkdir(u'Γ₯ b/.hidden')
369
372
370 def test_upload_txt(self):
373 def test_upload_txt(self):
371 body = u'ΓΌnicode tΓ©xt'
374 body = u'ΓΌnicode tΓ©xt'
372 model = {
375 model = {
373 'content' : body,
376 'content' : body,
374 'format' : 'text',
377 'format' : 'text',
375 'type' : 'file',
378 'type' : 'file',
376 }
379 }
377 path = u'Γ₯ b/Upload tΓ©st.txt'
380 path = u'Γ₯ b/Upload tΓ©st.txt'
378 resp = self.api.upload(path, body=json.dumps(model))
381 resp = self.api.upload(path, body=json.dumps(model))
379
382
380 # check roundtrip
383 # check roundtrip
381 resp = self.api.read(path)
384 resp = self.api.read(path)
382 model = resp.json()
385 model = resp.json()
383 self.assertEqual(model['type'], 'file')
386 self.assertEqual(model['type'], 'file')
384 self.assertEqual(model['format'], 'text')
387 self.assertEqual(model['format'], 'text')
385 self.assertEqual(model['content'], body)
388 self.assertEqual(model['content'], body)
386
389
387 def test_upload_b64(self):
390 def test_upload_b64(self):
388 body = b'\xFFblob'
391 body = b'\xFFblob'
389 b64body = base64.encodestring(body).decode('ascii')
392 b64body = base64.encodestring(body).decode('ascii')
390 model = {
393 model = {
391 'content' : b64body,
394 'content' : b64body,
392 'format' : 'base64',
395 'format' : 'base64',
393 'type' : 'file',
396 'type' : 'file',
394 }
397 }
395 path = u'Γ₯ b/Upload tΓ©st.blob'
398 path = u'Γ₯ b/Upload tΓ©st.blob'
396 resp = self.api.upload(path, body=json.dumps(model))
399 resp = self.api.upload(path, body=json.dumps(model))
397
400
398 # check roundtrip
401 # check roundtrip
399 resp = self.api.read(path)
402 resp = self.api.read(path)
400 model = resp.json()
403 model = resp.json()
401 self.assertEqual(model['type'], 'file')
404 self.assertEqual(model['type'], 'file')
402 self.assertEqual(model['path'], path)
405 self.assertEqual(model['path'], path)
403 self.assertEqual(model['format'], 'base64')
406 self.assertEqual(model['format'], 'base64')
404 decoded = base64.decodestring(model['content'].encode('ascii'))
407 decoded = base64.decodestring(model['content'].encode('ascii'))
405 self.assertEqual(decoded, body)
408 self.assertEqual(decoded, body)
406
409
407 def test_upload_v2(self):
410 def test_upload_v2(self):
408 nb = v2.new_notebook()
411 nb = v2.new_notebook()
409 ws = v2.new_worksheet()
412 ws = v2.new_worksheet()
410 nb.worksheets.append(ws)
413 nb.worksheets.append(ws)
411 ws.cells.append(v2.new_code_cell(input='print("hi")'))
414 ws.cells.append(v2.new_code_cell(input='print("hi")'))
412 nbmodel = {'content': nb, 'type': 'notebook'}
415 nbmodel = {'content': nb, 'type': 'notebook'}
413 path = u'Γ₯ b/Upload tΓ©st.ipynb'
416 path = u'Γ₯ b/Upload tΓ©st.ipynb'
414 resp = self.api.upload(path, body=json.dumps(nbmodel))
417 resp = self.api.upload(path, body=json.dumps(nbmodel))
415 self._check_created(resp, path)
418 self._check_created(resp, path)
416 resp = self.api.read(path)
419 resp = self.api.read(path)
417 data = resp.json()
420 data = resp.json()
418 self.assertEqual(data['content']['nbformat'], 4)
421 self.assertEqual(data['content']['nbformat'], 4)
419
422

    def test_copy(self):
        resp = self.api.copy(u'å b/ç d.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy1.ipynb')

        resp = self.api.copy(u'å b/ç d.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy2.ipynb')

    def test_copy_copy(self):
        resp = self.api.copy(u'å b/ç d.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy1.ipynb')

        resp = self.api.copy(u'å b/ç d-Copy1.ipynb', u'å b')
        self._check_created(resp, u'å b/ç d-Copy2.ipynb')

    def test_copy_path(self):
        resp = self.api.copy(u'foo/a.ipynb', u'å b')
        self._check_created(resp, u'å b/a.ipynb')

        resp = self.api.copy(u'foo/a.ipynb', u'å b')
        self._check_created(resp, u'å b/a-Copy1.ipynb')

    def test_copy_put_400(self):
        with assert_http_error(400):
            resp = self.api.copy_put(u'å b/ç d.ipynb', u'å b/cøpy.ipynb')

    def test_copy_dir_400(self):
        # can't copy directories
        with assert_http_error(400):
            resp = self.api.copy(u'å b', u'foo')

    def test_delete(self):
        for d, name in self.dirs_nbs:
            print('%r, %r' % (d, name))
            resp = self.api.delete(url_path_join(d, name + '.ipynb'))
            self.assertEqual(resp.status_code, 204)

        for d in self.dirs + ['/']:
            nbs = notebooks_only(self.api.list(d).json())
            print('------')
            print(d)
            print(nbs)
            self.assertEqual(nbs, [])

    def test_delete_dirs(self):
        # depth-first delete everything, so we don't try to delete empty directories
        for name in sorted(self.dirs + ['/'], key=len, reverse=True):
            listing = self.api.list(name).json()['content']
            for model in listing:
                self.api.delete(model['path'])
        listing = self.api.list('/').json()['content']
        self.assertEqual(listing, [])

    def test_delete_non_empty_dir(self):
        """delete non-empty dir raises 400"""
        with assert_http_error(400):
            self.api.delete(u'å b')

    def test_rename(self):
        resp = self.api.rename('foo/a.ipynb', 'foo/z.ipynb')
        self.assertEqual(resp.headers['Location'].split('/')[-1], 'z.ipynb')
        self.assertEqual(resp.json()['name'], 'z.ipynb')
        self.assertEqual(resp.json()['path'], 'foo/z.ipynb')
        assert self.isfile('foo/z.ipynb')

        nbs = notebooks_only(self.api.list('foo').json())
        nbnames = set(n['name'] for n in nbs)
        self.assertIn('z.ipynb', nbnames)
        self.assertNotIn('a.ipynb', nbnames)

    def test_rename_existing(self):
        with assert_http_error(409):
            self.api.rename('foo/a.ipynb', 'foo/b.ipynb')

    def test_save(self):
        resp = self.api.read('foo/a.ipynb')
        nbcontent = json.loads(resp.text)['content']
        nb = from_dict(nbcontent)
        nb.cells.append(new_markdown_cell(u'Created by test ³'))

        nbmodel = {'content': nb, 'type': 'notebook'}
        resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel))

        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        newnb = from_dict(nbcontent)
        self.assertEqual(newnb.cells[0].source,
                         u'Created by test ³')

    def test_checkpoints(self):
        resp = self.api.read('foo/a.ipynb')
        r = self.api.new_checkpoint('foo/a.ipynb')
        self.assertEqual(r.status_code, 201)
        cp1 = r.json()
        self.assertEqual(set(cp1), {'id', 'last_modified'})
        self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

        # Modify it
        nbcontent = json.loads(resp.text)['content']
        nb = from_dict(nbcontent)
        hcell = new_markdown_cell('Created by test')
        nb.cells.append(hcell)
        # Save
        nbmodel = {'content': nb, 'type': 'notebook'}
        resp = self.api.save('foo/a.ipynb', body=json.dumps(nbmodel))

        # List checkpoints
        cps = self.api.get_checkpoints('foo/a.ipynb').json()
        self.assertEqual(cps, [cp1])

        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        nb = from_dict(nbcontent)
        self.assertEqual(nb.cells[0].source, 'Created by test')

        # Restore cp1
        r = self.api.restore_checkpoint('foo/a.ipynb', cp1['id'])
        self.assertEqual(r.status_code, 204)
        nbcontent = self.api.read('foo/a.ipynb').json()['content']
        nb = from_dict(nbcontent)
        self.assertEqual(nb.cells, [])

        # Delete cp1
        r = self.api.delete_checkpoint('foo/a.ipynb', cp1['id'])
        self.assertEqual(r.status_code, 204)
        cps = self.api.get_checkpoints('foo/a.ipynb').json()
        self.assertEqual(cps, [])

    def test_file_checkpoints(self):
        """
        Test checkpointing of non-notebook files.
        """
        filename = 'foo/a.txt'
        resp = self.api.read(filename)
        orig_content = json.loads(resp.text)['content']

        # Create a checkpoint.
        r = self.api.new_checkpoint(filename)
        self.assertEqual(r.status_code, 201)
        cp1 = r.json()
        self.assertEqual(set(cp1), {'id', 'last_modified'})
        self.assertEqual(r.headers['Location'].split('/')[-1], cp1['id'])

        # Modify the file and save.
        new_content = orig_content + '\nsecond line'
        model = {
            'content': new_content,
            'type': 'file',
            'format': 'text',
        }
        resp = self.api.save(filename, body=json.dumps(model))

        # List checkpoints
        cps = self.api.get_checkpoints(filename).json()
        self.assertEqual(cps, [cp1])

        content = self.api.read(filename).json()['content']
        self.assertEqual(content, new_content)

        # Restore cp1
        r = self.api.restore_checkpoint(filename, cp1['id'])
        self.assertEqual(r.status_code, 204)
        restored_content = self.api.read(filename).json()['content']
        self.assertEqual(restored_content, orig_content)

        # Delete cp1
        r = self.api.delete_checkpoint(filename, cp1['id'])
        self.assertEqual(r.status_code, 204)
        cps = self.api.get_checkpoints(filename).json()
        self.assertEqual(cps, [])

    @contextmanager
    def patch_cp_root(self, dirname):
        """
        Temporarily patch the root dir of our checkpoint manager.
        """
        cpm = self.notebook.contents_manager.checkpoint_manager
        old_dirname = cpm.root_dir
        cpm.root_dir = dirname
        try:
            yield
        finally:
            cpm.root_dir = old_dirname

    def test_checkpoints_separate_root(self):
        """
        Test that FileCheckpointManager functions correctly even when it uses
        a different root dir from FileContentsManager. This also keeps the
        implementation honest for use with ContentsManagers that don't map
        models to the filesystem.

        Override this method to a no-op when testing other managers.
        """
        with TemporaryDirectory() as td:
            with self.patch_cp_root(td):
                self.test_checkpoints()

        with TemporaryDirectory() as td:
            with self.patch_cp_root(td):
                self.test_file_checkpoints()

    @contextmanager
    def patch_cm_backend(self):
        """
        Temporarily patch our ContentsManager to present a different backend.
        """
        mgr = self.notebook.contents_manager
        old_backend = mgr.backend
        mgr.backend = ""
        try:
            yield
        finally:
            mgr.backend = old_backend

    def test_checkpoints_empty_backend(self):
        with self.patch_cm_backend():
            self.test_checkpoints()

        with self.patch_cm_backend():
            self.test_file_checkpoints()
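
The docstring of test_checkpoints_separate_root above asks subclasses that exercise non-filesystem ContentsManagers to override that test to a no-op. A minimal sketch of such an override follows; the subclass name and the idea of a database-backed manager are illustrative assumptions, not part of this change.

class HypotheticalDatabaseContentsAPITest(APITest):
    # Sketch only: a subclass whose ContentsManager does not keep checkpoints
    # on the filesystem neutralizes the root-dir test, as the base class
    # docstring suggests. The class name here is an assumption.
    def test_checkpoints_separate_root(self):
        # No checkpoint root directory exists to relocate for this backend.
        pass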


class GenericFileCheckpointsAPITest(APITest):
    """
    Run the tests from APITest with GenericFileCheckpointManager.
    """

    config = Config()
    config.FileContentsManager.checkpoint_manager_class = \
        GenericFileCheckpointManager
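
For comparison, the class-level Config override above could also be expressed in a user's configuration file to select the generic checkpoint backend outside the test suite. This is a hedged sketch only: the diff does not show which module defines GenericFileCheckpointManager, so the import path below is an assumption.

# ipython_notebook_config.py -- sketch under the assumptions stated above
from IPython.html.services.contents.filecheckpoints import GenericFileCheckpointManager  # assumed module path

c = get_config()
c.FileContentsManager.checkpoint_manager_class = GenericFileCheckpointManager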