@@ -1,186 +1,191 @@
 """Tornado handlers for the notebook.
 
 Authors:
 
 * Brian Granger
 """
 
 #-----------------------------------------------------------------------------
 # Copyright (C) 2008-2011 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------
 
 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
 
 import logging
 from tornado import web
 
 from zmq.utils import jsonapi
 
 from IPython.utils.jsonutil import date_default
 
-from ...base.handlers import IPythonHandler
+from ...base.handlers import IPythonHandler, json_errors
 from ...base.zmqhandlers import AuthenticatedZMQStreamHandler
 
 #-----------------------------------------------------------------------------
 # Kernel handlers
 #-----------------------------------------------------------------------------
 
 
 class MainKernelHandler(IPythonHandler):
 
     @web.authenticated
+    @json_errors
     def get(self):
         km = self.kernel_manager
-        self.finish(jsonapi.dumps(km.list_kernels()))
+        self.finish(jsonapi.dumps(km.list_kernels(self.ws_url)))
 
     @web.authenticated
+    @json_errors
     def post(self):
         km = self.kernel_manager
         kernel_id = km.start_kernel()
         model = km.kernel_model(kernel_id, self.ws_url)
         self.set_header('Location', '{0}kernels/{1}'.format(self.base_kernel_url, kernel_id))
         self.finish(jsonapi.dumps(model))
 
 
 class KernelHandler(IPythonHandler):
 
     SUPPORTED_METHODS = ('DELETE', 'GET')
 
     @web.authenticated
+    @json_errors
     def get(self, kernel_id):
         km = self.kernel_manager
         model = km.kernel_model(kernel_id, self.ws_url)
         self.finish(jsonapi.dumps(model))
 
     @web.authenticated
+    @json_errors
     def delete(self, kernel_id):
         km = self.kernel_manager
         km.shutdown_kernel(kernel_id)
         self.set_status(204)
         self.finish()
 
 
 class KernelActionHandler(IPythonHandler):
 
     @web.authenticated
+    @json_errors
     def post(self, kernel_id, action):
         km = self.kernel_manager
         if action == 'interrupt':
             km.interrupt_kernel(kernel_id)
             self.set_status(204)
         if action == 'restart':
             km.restart_kernel(kernel_id)
             model = km.kernel_model(kernel_id, self.ws_url)
             self.set_header('Location', '{0}api/kernels/{1}'.format(self.base_kernel_url, kernel_id))
             self.write(jsonapi.dumps(model))
         self.finish()
 
 
 class ZMQChannelHandler(AuthenticatedZMQStreamHandler):
 
     def create_stream(self):
         km = self.kernel_manager
         meth = getattr(km, 'connect_%s' % self.channel)
         self.zmq_stream = meth(self.kernel_id, identity=self.session.bsession)
 
     def initialize(self, *args, **kwargs):
         self.zmq_stream = None
 
     def on_first_message(self, msg):
         try:
             super(ZMQChannelHandler, self).on_first_message(msg)
         except web.HTTPError:
             self.close()
             return
         try:
             self.create_stream()
         except web.HTTPError:
             # WebSockets don't response to traditional error codes so we
             # close the connection.
             if not self.stream.closed():
                 self.stream.close()
             self.close()
         else:
             self.zmq_stream.on_recv(self._on_zmq_reply)
 
     def on_message(self, msg):
         msg = jsonapi.loads(msg)
         self.session.send(self.zmq_stream, msg)
 
     def on_close(self):
         # This method can be called twice, once by self.kernel_died and once
         # from the WebSocket close event. If the WebSocket connection is
         # closed before the ZMQ streams are setup, they could be None.
         if self.zmq_stream is not None and not self.zmq_stream.closed():
             self.zmq_stream.on_recv(None)
             self.zmq_stream.close()
 
 
 class IOPubHandler(ZMQChannelHandler):
     channel = 'iopub'
 
     def create_stream(self):
         super(IOPubHandler, self).create_stream()
         km = self.kernel_manager
         km.add_restart_callback(self.kernel_id, self.on_kernel_restarted)
         km.add_restart_callback(self.kernel_id, self.on_restart_failed, 'dead')
 
     def on_close(self):
         km = self.kernel_manager
         if self.kernel_id in km:
             km.remove_restart_callback(
                 self.kernel_id, self.on_kernel_restarted,
             )
             km.remove_restart_callback(
                 self.kernel_id, self.on_restart_failed, 'dead',
             )
         super(IOPubHandler, self).on_close()
 
     def _send_status_message(self, status):
         msg = self.session.msg("status",
             {'execution_state': status}
         )
         self.write_message(jsonapi.dumps(msg, default=date_default))
 
     def on_kernel_restarted(self):
         logging.warn("kernel %s restarted", self.kernel_id)
         self._send_status_message('restarting')
 
     def on_restart_failed(self):
         logging.error("kernel %s restarted failed!", self.kernel_id)
         self._send_status_message('dead')
 
     def on_message(self, msg):
         """IOPub messages make no sense"""
         pass
 
 
 class ShellHandler(ZMQChannelHandler):
     channel = 'shell'
 
 
 class StdinHandler(ZMQChannelHandler):
     channel = 'stdin'
 
 
 #-----------------------------------------------------------------------------
 # URL to handler mappings
 #-----------------------------------------------------------------------------
 
 
 _kernel_id_regex = r"(?P<kernel_id>\w+-\w+-\w+-\w+-\w+)"
 _kernel_action_regex = r"(?P<action>restart|interrupt)"
 
 default_handlers = [
     (r"/api/kernels", MainKernelHandler),
     (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler),
     (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler),
     (r"/api/kernels/%s/iopub" % _kernel_id_regex, IOPubHandler),
     (r"/api/kernels/%s/shell" % _kernel_id_regex, ShellHandler),
     (r"/api/kernels/%s/stdin" % _kernel_id_regex, StdinHandler)
 ]
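
For context: the `json_errors` decorator applied to each handler method above is imported from `...base.handlers`; its implementation is not part of this diff. A minimal sketch of what a decorator like this might do, assuming its only job is to turn exceptions raised inside a handler method into JSON error responses instead of Tornado's default HTML error pages (illustrative only, not the actual IPython implementation):

import functools
import json

from tornado import web


def json_errors(method):
    """Sketch: report handler errors as JSON bodies (illustrative only)."""
    @functools.wraps(method)
    def wrapper(self, *args, **kwargs):
        try:
            return method(self, *args, **kwargs)
        except web.HTTPError as e:
            # Keep the intended status code, but send a JSON payload.
            self.set_status(e.status_code)
            self.set_header('Content-Type', 'application/json')
            self.finish(json.dumps({'error': e.status_code,
                                    'message': e.log_message or ''}))
        except Exception as e:
            # Unexpected failures become a 500 with a JSON body.
            self.set_status(500)
            self.set_header('Content-Type', 'application/json')
            self.finish(json.dumps({'error': 500, 'message': str(e)}))
    return wrapper

Because `@web.authenticated` sits outside `@json_errors`, authentication is still handled by Tornado first, while errors raised by the kernel manager (for example the 404 from `_check_kernel_id` below) reach API clients as JSON.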
@@ -1,90 +1,95 @@
 """A kernel manager relating notebooks and kernels
 
 Authors:
 
 * Brian Granger
 """
 
 #-----------------------------------------------------------------------------
 # Copyright (C) 2013 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------
 
 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
 
 from tornado import web
 
 from IPython.kernel.multikernelmanager import MultiKernelManager
 from IPython.utils.traitlets import (
     Dict, List, Unicode,
 )
 
 #-----------------------------------------------------------------------------
 # Classes
 #-----------------------------------------------------------------------------
 
 
 class MappingKernelManager(MultiKernelManager):
     """A KernelManager that handles notebook mapping and HTTP error handling"""
 
     def _kernel_manager_class_default(self):
         return "IPython.kernel.ioloop.IOLoopKernelManager"
 
     kernel_argv = List(Unicode)
 
     #-------------------------------------------------------------------------
     # Methods for managing kernels and sessions
     #-------------------------------------------------------------------------
 
     def _handle_kernel_died(self, kernel_id):
         """notice that a kernel died"""
         self.log.warn("Kernel %s died, removing from map.", kernel_id)
         self.remove_kernel(kernel_id)
 
     def start_kernel(self, kernel_id=None, **kwargs):
         """Start a kernel for a session an return its kernel_id.
 
         Parameters
         ----------
         kernel_id : uuid
             The uuid to associate the new kernel with. If this
             is not None, this kernel will be persistent whenever it is
             requested.
         """
         if kernel_id is None:
             kwargs['extra_arguments'] = self.kernel_argv
             kernel_id = super(MappingKernelManager, self).start_kernel(**kwargs)
             self.log.info("Kernel started: %s" % kernel_id)
             self.log.debug("Kernel args: %r" % kwargs)
             # register callback for failed auto-restart
             self.add_restart_callback(kernel_id,
                 lambda : self._handle_kernel_died(kernel_id),
                 'dead',
             )
         else:
             self.log.info("Using existing kernel: %s" % kernel_id)
         return kernel_id
 
     def shutdown_kernel(self, kernel_id, now=False):
         """Shutdown a kernel by kernel_id"""
         super(MappingKernelManager, self).shutdown_kernel(kernel_id, now=now)
 
     def kernel_model(self, kernel_id, ws_url):
         """Return a dictionary of kernel information described in the
         JSON standard model."""
         model = {"id":kernel_id, "ws_url": ws_url}
         return model
 
-    def list_kernels(self):
+    def list_kernels(self, ws_url):
         """Returns a list of kernel_id's of kernels running."""
-        return super(MappingKernelManager, self).list_kernel_ids()
+        kernels = []
+        kernel_ids = super(MappingKernelManager, self).list_kernel_ids()
+        for kernel_id in kernel_ids:
+            model = self.kernel_model(kernel_id, ws_url)
+            kernels.append(model)
+        return kernels
 
     # override _check_kernel_id to raise 404 instead of KeyError
     def _check_kernel_id(self, kernel_id):
         """Check a that a kernel_id exists and raise 404 if not."""
         if kernel_id not in self:
             raise web.HTTPError(404, u'Kernel does not exist: %s' % kernel_id)
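
Together with the handler change above (`km.list_kernels(self.ws_url)`), the new `list_kernels(self, ws_url)` means `GET /api/kernels` now returns a list of kernel models rather than a list of bare kernel IDs. An illustration of the new response shape, with hypothetical values:

# Each entry is what kernel_model() builds; the UUID and URL are hypothetical.
kernels = [
    {"id": "87cf0a03-5a0f-4a0e-8d2a-3b8f9a1c2d4e",
     "ws_url": "ws://127.0.0.1:8888"},
]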
@@ -1,54 +1,54 @@
 """Test the kernels service API."""
 
 
 import os
 import sys
 import json
 
 import requests
 
 from IPython.html.utils import url_path_join
 from IPython.html.tests.launchnotebook import NotebookTestBase
 
 
 class KernelAPITest(NotebookTestBase):
     """Test the kernels web service API"""
 
     def base_url(self):
         return url_path_join(super(KernelAPITest,self).base_url(), 'api/kernels')
 
     def mkkernel(self):
         r = requests.post(self.base_url())
         return r.json()
 
     def test__no_kernels(self):
         """Make sure there are no kernels running at the start"""
         url = self.base_url()
         r = requests.get(url)
         self.assertEqual(r.json(), [])
 
     def test_main_kernel_handler(self):
         # POST request
         r = requests.post(self.base_url())
         data = r.json()
         assert isinstance(data, dict)
 
         # GET request
         r = requests.get(self.base_url())
         assert isinstance(r.json(), list)
-        self.assertEqual(r.json()[0], data['id'])
+        self.assertEqual(r.json()[0]['id'], data['id'])
 
     def test_kernel_handler(self):
         # GET kernel with id
         data = self.mkkernel()
         url = self.base_url() +'/' + data['id']
         r = requests.get(url)
         assert isinstance(r.json(), dict)
         self.assertIn('id', r.json())
         self.assertEqual(r.json()['id'], data['id'])
 
         # DELETE kernel with id
         r = requests.delete(url)
         self.assertEqual(r.status_code, 204)
         r = requests.get(self.base_url())
         self.assertEqual(r.json(), [])
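
The updated tests describe the client-facing behaviour: POST creates a kernel and returns its model, GET lists kernel models (hence the `r.json()[0]['id']` fix), and DELETE answers with 204. A rough equivalent as a standalone client session, with a hypothetical server address:

import requests

base = 'http://127.0.0.1:8888/api/kernels'      # hypothetical server address

kernel = requests.post(base).json()             # start a kernel -> model dict
assert 'id' in kernel

running = requests.get(base).json()             # list kernels -> list of models
assert running[0]['id'] == kernel['id']

r = requests.delete(base + '/' + kernel['id'])  # shut the kernel down
assert r.status_code == 204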
@@ -1,350 +1,350 @@
 """A notebook manager that uses the local file system for storage.
 
 Authors:
 
 * Brian Granger
 * Zach Sailer
 """
 
 #-----------------------------------------------------------------------------
 # Copyright (C) 2011 The IPython Development Team
 #
 # Distributed under the terms of the BSD License. The full license is in
 # the file COPYING, distributed as part of this software.
 #-----------------------------------------------------------------------------
 
 #-----------------------------------------------------------------------------
 # Imports
 #-----------------------------------------------------------------------------
 
 import datetime
 import io
 import os
 import glob
 import shutil
 
 from unicodedata import normalize
 
 from tornado import web
 
 from .nbmanager import NotebookManager
 from IPython.nbformat import current
 from IPython.utils.traitlets import Unicode, Dict, Bool, TraitError
 from IPython.utils import tz
 
 #-----------------------------------------------------------------------------
 # Classes
 #-----------------------------------------------------------------------------
 
 class FileNotebookManager(NotebookManager):
 
     save_script = Bool(False, config=True,
         help="""Automatically create a Python script when saving the notebook.
 
         For easier use of import, %run and %load across notebooks, a
         <notebook-name>.py script will be created next to any
         <notebook-name>.ipynb on each save. This can also be set with the
         short `--script` flag.
         """
     )
 
     checkpoint_dir = Unicode(config=True,
         help="""The location in which to keep notebook checkpoints
 
         By default, it is notebook-dir/.ipynb_checkpoints
         """
     )
     def _checkpoint_dir_default(self):
         return os.path.join(self.notebook_dir, '.ipynb_checkpoints')
 
     def _checkpoint_dir_changed(self, name, old, new):
         """do a bit of validation of the checkpoint dir"""
         if not os.path.isabs(new):
             # If we receive a non-absolute path, make it absolute.
             abs_new = os.path.abspath(new)
             self.checkpoint_dir = abs_new
             return
         if os.path.exists(new) and not os.path.isdir(new):
             raise TraitError("checkpoint dir %r is not a directory" % new)
         if not os.path.exists(new):
             self.log.info("Creating checkpoint dir %s", new)
             try:
                 os.mkdir(new)
             except:
                 raise TraitError("Couldn't create checkpoint dir %r" % new)
 
     def get_notebook_names(self, path='/'):
         """List all notebook names in the notebook dir and path."""
         names = glob.glob(self.get_os_path('*'+self.filename_ext, path))
         names = [os.path.basename(name)
                  for name in names]
         return names
 
     def increment_filename(self, basename, path='/'):
         """Return a non-used filename of the form basename<int>."""
         i = 0
         while True:
             name = u'%s%i.ipynb' % (basename,i)
             os_path = self.get_os_path(name, path)
             if not os.path.isfile(os_path):
                 break
             else:
                 i = i+1
         return name
 
     def notebook_exists(self, name, path='/'):
         """Returns a True if the notebook exists. Else, returns False.
 
         Parameters
         ----------
         name : string
             The name of the notebook you are checking.
         path : string
             The relative path to the notebook (with '/' as separator)
 
         Returns
         -------
         bool
         """
         path = self.get_os_path(name, path='/')
         return os.path.isfile(path)
 
     def list_notebooks(self, path):
         """Returns a list of dictionaries that are the standard model
         for all notebooks in the relative 'path'.
 
         Parameters
         ----------
         path : str
             the URL path that describes the relative path for the
             listed notebooks
 
         Returns
         -------
         notebooks : list of dicts
             a list of the notebook models without 'content'
         """
         notebook_names = self.get_notebook_names(path)
         notebooks = []
         for name in notebook_names:
             model = self.get_notebook_model(name, path, content=False)
             notebooks.append(model)
         notebooks = sorted(notebooks, key=lambda item: item['name'])
         return notebooks
 
     def get_notebook_model(self, name, path='/', content=True):
         """ Takes a path and name for a notebook and returns it's model
 
         Parameters
         ----------
         name : str
             the name of the notebook
         path : str
             the URL path that describes the relative path for
             the notebook
 
         Returns
         -------
         model : dict
             the notebook model. If contents=True, returns the 'contents'
             dict in the model as well.
         """
         os_path = self.get_os_path(name, path)
         if not os.path.isfile(os_path):
             raise web.HTTPError(404, u'Notebook does not exist: %s' % name)
         info = os.stat(os_path)
         last_modified = tz.utcfromtimestamp(info.st_mtime)
         # Create the notebook model.
         model ={}
         model['name'] = name
         model['path'] = path
         model['last_modified'] = last_modified
         if content is True:
             with open(os_path, 'r') as f:
                 try:
                     nb = current.read(f, u'json')
                 except Exception as e:
                     raise web.HTTPError(400, u"Unreadable Notebook: %s %s" % (os_path, e))
             model['content'] = nb
         return model
 
     def save_notebook_model(self, model, name, path='/'):
         """Save the notebook model and return the model with no content."""
 
         if 'content' not in model:
             raise web.HTTPError(400, u'No notebook JSON data provided')
 
         new_path = model.get('path', path)
         new_name = model.get('name', name)
 
         if path != new_path or name != new_name:
             self.rename_notebook(name, path, new_name, new_path)
 
         # Save the notebook file
         os_path = self.get_os_path(new_name, new_path)
         nb = current.to_notebook_json(model['content'])
         if 'name' in nb['metadata']:
             nb['metadata']['name'] = u''
         try:
             self.log.debug("Autosaving notebook %s", os_path)
             with open(os_path, 'w') as f:
                 current.write(nb, f, u'json')
         except Exception as e:
             raise web.HTTPError(400, u'Unexpected error while autosaving notebook: %s %s' % (os_path, e))
 
         # Save .py script as well
         if self.save_script:
             py_path = os.path.splitext(os_path)[0] + '.py'
             self.log.debug("Writing script %s", py_path)
             try:
                 with io.open(py_path, 'w', encoding='utf-8') as f:
                     current.write(model, f, u'py')
             except Exception as e:
                 raise web.HTTPError(400, u'Unexpected error while saving notebook as script: %s %s' % (py_path, e))
 
         model = self.get_notebook_model(name, path, content=False)
         return model
 
     def update_notebook_model(self, model, name, path='/'):
         """Update the notebook's path and/or name"""
         new_name = model.get('name', name)
         new_path = model.get('path', path)
         if path != new_path or name != new_name:
             self.rename_notebook(name, path, new_name, new_path)
         model = self.get_notebook_model(new_name, new_path, content=False)
         return model
 
     def delete_notebook_model(self, name, path='/'):
         """Delete notebook by name and path."""
         os_path = self.get_os_path(name, path)
         if not os.path.isfile(os_path):
             raise web.HTTPError(404, u'Notebook does not exist: %s' % os_path)
 
         # clear checkpoints
         for checkpoint in self.list_checkpoints(name, path):
             checkpoint_id = checkpoint['checkpoint_id']
             cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
             if os.path.isfile(cp_path):
                 self.log.debug("Unlinking checkpoint %s", cp_path)
                 os.unlink(cp_path)
 
         self.log.debug("Unlinking notebook %s", os_path)
         os.unlink(os_path)
 
     def rename_notebook(self, old_name, old_path, new_name, new_path):
         """Rename a notebook."""
         if new_name == old_name and new_path == old_path:
             return
 
         new_os_path = self.get_os_path(new_name, new_path)
         old_os_path = self.get_os_path(old_name, old_path)
 
         # Should we proceed with the move?
         if os.path.isfile(new_os_path):
             raise web.HTTPError(409, u'Notebook with name already exists: ' % new_os_path)
         if self.save_script:
             old_py_path = os.path.splitext(old_os_path)[0] + '.py'
             new_py_path = os.path.splitext(new_os_path)[0] + '.py'
             if os.path.isfile(new_py_path):
                 raise web.HTTPError(409, u'Python script with name already exists: %s' % new_py_path)
 
         # Move the notebook file
         try:
             os.rename(old_os_path, new_os_path)
         except Exception as e:
             raise web.HTTPError(400, u'Unknown error renaming notebook: %s %s' % (old_os_path, e))
 
         # Move the checkpoints
         old_checkpoints = self.list_checkpoints(old_name, old_path)
         for cp in old_checkpoints:
             checkpoint_id = cp['checkpoint_id']
             old_cp_path = self.get_checkpoint_path(checkpoint_id, old_name, path)
             new_cp_path = self.get_checkpoint_path(checkpoint_id, new_name, path)
             if os.path.isfile(old_cp_path):
                 self.log.debug("Renaming checkpoint %s -> %s", old_cp_path, new_cp_path)
                 os.rename(old_cp_path, new_cp_path)
 
         # Move the .py script
         if self.save_script:
             os.rename(old_py_path, new_py_path)
 
     # Checkpoint-related utilities
 
     def get_checkpoint_path(self, checkpoint_id, name, path='/'):
         """find the path to a checkpoint"""
         filename = u"{name}-{checkpoint_id}{ext}".format(
             name=name,
             checkpoint_id=checkpoint_id,
             ext=self.filename_ext,
         )
         cp_path = os.path.join(path, self.checkpoint_dir, filename)
         return cp_path
 
     def get_checkpoint_model(self, checkpoint_id, name, path='/'):
         """construct the info dict for a given checkpoint"""
         cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
         stats = os.stat(cp_path)
         last_modified = tz.utcfromtimestamp(stats.st_mtime)
         info = dict(
             checkpoint_id = checkpoint_id,
             last_modified = last_modified,
         )
         return info
 
     # public checkpoint API
 
     def create_checkpoint(self, name, path='/'):
         """Create a checkpoint from the current state of a notebook"""
         nb_path = self.get_os_path(name, path)
         # only the one checkpoint ID:
         checkpoint_id = u"checkpoint"
         cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
         self.log.debug("creating checkpoint for notebook %s", name)
         if not os.path.exists(self.checkpoint_dir):
             os.mkdir(self.checkpoint_dir)
         shutil.copy2(nb_path, cp_path)
 
         # return the checkpoint info
         return self.get_checkpoint_model(checkpoint_id, name, path)
 
     def list_checkpoints(self, name, path='/'):
         """list the checkpoints for a given notebook
 
         This notebook manager currently only supports one checkpoint per notebook.
         """
         checkpoint_id = "checkpoint"
         path = self.get_checkpoint_path(checkpoint_id, name, path)
         if not os.path.exists(path):
             return []
         else:
             return [self.get_checkpoint_model(checkpoint_id, name, path)]
 
 
     def restore_checkpoint(self, checkpoint_id, name, path='/'):
         """restore a notebook to a checkpointed state"""
         self.log.info("restoring Notebook %s from checkpoint %s", name, checkpoint_id)
         nb_path = self.get_os_path(name, path)
         cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
         if not os.path.isfile(cp_path):
             self.log.debug("checkpoint file does not exist: %s", cp_path)
             raise web.HTTPError(404,
                 u'Notebook checkpoint does not exist: %s-%s' % (name, checkpoint_id)
             )
         # ensure notebook is readable (never restore from an unreadable notebook)
         with file(cp_path, 'r') as f:
             nb = current.read(f, u'json')
         shutil.copy2(cp_path, nb_path)
         self.log.debug("copying %s -> %s", cp_path, nb_path)
 
     def delete_checkpoint(self, checkpoint_id, name, path='/'):
         """delete a notebook's checkpoint"""
         cp_path = self.get_checkpoint_path(checkpoint_id, name, path)
         if not os.path.isfile(cp_path):
             raise web.HTTPError(404,
                 u'Notebook checkpoint does not exist: %s%s-%s' % (path, name, checkpoint_id)
             )
         self.log.debug("unlinking %s", cp_path)
         os.unlink(cp_path)
 
     def info_string(self):
         return "Serving notebooks from local directory: %s" % self.notebook_dir
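
The checkpoint machinery above keeps a single checkpoint per notebook under `checkpoint_dir`. A worked example of the path produced by `get_checkpoint_path`, using hypothetical names and directories and assuming `filename_ext` is '.ipynb':

import os

# Hypothetical inputs mirroring get_checkpoint_path() above.
checkpoint_dir = '/home/user/notebooks/.ipynb_checkpoints'
name, checkpoint_id, ext = 'Untitled0.ipynb', 'checkpoint', '.ipynb'

filename = u"{name}-{checkpoint_id}{ext}".format(
    name=name, checkpoint_id=checkpoint_id, ext=ext)
cp_path = os.path.join('/', checkpoint_dir, filename)
# -> /home/user/notebooks/.ipynb_checkpoints/Untitled0.ipynb-checkpoint.ipynb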