First version of cluster web service....
Brian Granger -
@@ -0,0 +1,90 b''
1 """Manage IPython.parallel clusters in the notebook.
2
3 Authors:
4
5 * Brian Granger
6 """
7
8 #-----------------------------------------------------------------------------
9 # Copyright (C) 2008-2011 The IPython Development Team
10 #
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
13 #-----------------------------------------------------------------------------
14
15 #-----------------------------------------------------------------------------
16 # Imports
17 #-----------------------------------------------------------------------------
18
19 import datetime
20 import os
21 import uuid
22 import glob
23
24 from tornado import web
25 from zmq.eventloop import ioloop
26
27 from IPython.config.configurable import LoggingConfigurable
28 from IPython.utils.traitlets import Unicode, List, Dict, Bool
29 from IPython.parallel.apps.launcher import IPClusterLauncher
30 from IPython.core.profileapp import list_profiles_in, list_bundled_profiles
31 from IPython.utils.path import get_ipython_dir, get_ipython_package_dir
32
33 #-----------------------------------------------------------------------------
34 # Classes
35 #-----------------------------------------------------------------------------
36
37 class ClusterManager(LoggingConfigurable):
38
39 profiles = Dict()
40
41
42 def list_profile_names(self):
43 """List all profiles in the ipython_dir and cwd.
44 """
45 profiles = list_profiles_in(get_ipython_dir())
46 profiles += list_profiles_in(os.getcwdu())
47 return profiles
48
49
50 def list_profiles(self):
51 profiles = self.list_profile_names()
52 result = [self.profile_info(p) for p in profiles]
53 return result
54
55
56 def profile_info(self, profile):
57 if profile not in self.list_profile_names():
58 raise web.HTTPError(404, u'profile not found')
59 result = dict(profile=profile)
60 data = self.profiles.get(profile)
61 if data is None:
62 result['status'] = 'stopped'
63 else:
64 result['status'] = 'running'
65 result['n'] = data['n']
66 return result
67
68 def start_cluster(self, profile, n=4):
69 """Start a cluster for a given profile."""
70 if profile not in self.list_profile_names():
71 raise web.HTTPError(404, u'profile not found')
72 if profile in self.profiles:
73 raise web.HTTPError(409, u'cluster already running')
74 launcher = IPClusterLauncher(ipcluster_profile=profile, ipcluster_n=n)
75 launcher.start()
76 self.profiles[profile] = {
77 'launcher': launcher,
78 'n': n
79 }
80
81 def stop_cluster(self, profile):
82 """Stop a cluster for a given profile."""
83 if profile not in self.profiles:
84 raise web.HTTPError(409, u'cluster not running')
85 launcher = self.profiles.pop(profile)['launcher']
86 launcher.stop()
87
88 def stop_all_clusters(self):
89 for p in self.profiles.values():
90 p['launcher'].stop()
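For orientation, a minimal sketch of driving the new ClusterManager directly, outside the web handlers. It assumes the module is importable as IPython.frontend.html.notebook.clustermanager (next to the other notebook modules) and that a profile named 'default' exists; names and values are illustrative only.

    from IPython.frontend.html.notebook.clustermanager import ClusterManager

    cm = ClusterManager()
    print cm.list_profile_names()       # profiles found in the ipython dir and the cwd
    print cm.profile_info(u'default')   # {'profile': u'default', 'status': 'stopped'}

    cm.start_cluster(u'default', n=2)   # launches ipcluster with 2 engines via IPClusterLauncher
    print cm.profile_info(u'default')   # now reports 'status': 'running' and 'n': 2

    cm.stop_cluster(u'default')         # stops the launcher and drops it from the running set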
@@ -1,290 +1,293 b''
1 1 # encoding: utf-8
2 2 """
3 3 An application for managing IPython profiles.
4 4
5 5 To be invoked as the `ipython profile` subcommand.
6 6
7 7 Authors:
8 8
9 9 * Min RK
10 10
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Copyright (C) 2008-2011 The IPython Development Team
15 15 #
16 16 # Distributed under the terms of the BSD License. The full license is in
17 17 # the file COPYING, distributed as part of this software.
18 18 #-----------------------------------------------------------------------------
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Imports
22 22 #-----------------------------------------------------------------------------
23 23
24 24 import logging
25 25 import os
26 26
27 27 from IPython.config.application import Application, boolean_flag
28 28 from IPython.core.application import (
29 29 BaseIPythonApplication, base_flags, base_aliases
30 30 )
31 31 from IPython.core.profiledir import ProfileDir
32 32 from IPython.utils.path import get_ipython_dir, get_ipython_package_dir
33 33 from IPython.utils.traitlets import Unicode, Bool, Dict
34 34
35 35 #-----------------------------------------------------------------------------
36 36 # Constants
37 37 #-----------------------------------------------------------------------------
38 38
39 39 create_help = """Create an IPython profile by name
40 40
41 41 Create an ipython profile directory by its name or
42 42 profile directory path. Profile directories contain
43 43 configuration, log and security related files and are named
44 44 using the convention 'profile_<name>'. By default they are
45 45 located in your ipython directory. Once created, you
46 46 can edit the configuration files in the profile
47 47 directory to configure IPython. Most users will create a
48 48 profile directory by name,
49 49 `ipython profile create myprofile`, which will put the directory
50 50 in `<ipython_dir>/profile_myprofile`.
51 51 """
52 52 list_help = """List available IPython profiles
53 53
54 54 List all available profiles, by profile location, that can
55 55 be found in the current working directory or in the ipython
56 56 directory. Profile directories are named using the convention
57 57 'profile_<profile>'.
58 58 """
59 59 profile_help = """Manage IPython profiles
60 60
61 61 Profile directories contain
62 62 configuration, log and security related files and are named
63 63 using the convention 'profile_<name>'. By default they are
64 64 located in your ipython directory. You can create profiles
65 65 with `ipython profile create <name>`, or see the profiles you
66 66 already have with `ipython profile list`
67 67
68 68 To get started configuring IPython, simply do:
69 69
70 70 $> ipython profile create
71 71
72 72 and IPython will create the default profile in <ipython_dir>/profile_default,
73 73 where you can edit ipython_config.py to start configuring IPython.
74 74
75 75 """
76 76
77 77 _list_examples = "ipython profile list # list all profiles"
78 78
79 79 _create_examples = """
80 80 ipython profile create foo # create profile foo w/ default config files
81 81 ipython profile create foo --reset # restage default config files over current
82 82 ipython profile create foo --parallel # also stage parallel config files
83 83 """
84 84
85 85 _main_examples = """
86 86 ipython profile create -h # show the help string for the create subcommand
87 87 ipython profile list -h # show the help string for the list subcommand
88 88 """
89 89
90 90 #-----------------------------------------------------------------------------
91 91 # Profile Application Class (for `ipython profile` subcommand)
92 92 #-----------------------------------------------------------------------------
93 93
94 94
95 def list_profiles_in(path):
96 """list profiles in a given root directory"""
97 files = os.listdir(path)
98 profiles = []
99 for f in files:
100 full_path = os.path.join(path, f)
101 if os.path.isdir(full_path) and f.startswith('profile_'):
102 profiles.append(f.split('_',1)[-1])
103 return profiles
104
105
106 def list_bundled_profiles():
107 """list profiles that are bundled with IPython."""
108 path = os.path.join(get_ipython_package_dir(), u'config', u'profile')
109 files = os.listdir(path)
110 profiles = []
111 for profile in files:
112 full_path = os.path.join(path, profile)
113 if os.path.isdir(full_path):
114 profiles.append(profile)
115 return profiles
116
117
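These two helpers are the ProfileList._list_profiles_in/_list_bundled_profiles methods hoisted to module level (the method versions are removed just below), so that other code, such as the new ClusterManager, can import them. A small illustrative use, assuming a standard IPython install:

    from IPython.core.profileapp import list_profiles_in, list_bundled_profiles
    from IPython.utils.path import get_ipython_dir

    print list_bundled_profiles()              # profiles shipped inside the IPython package
    print list_profiles_in(get_ipython_dir())  # e.g. [u'default'] for a typical ~/.ipython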
95 118 class ProfileList(Application):
96 119 name = u'ipython-profile'
97 120 description = list_help
98 121 examples = _list_examples
99 122
100 123 aliases = Dict({
101 124 'ipython-dir' : 'ProfileList.ipython_dir',
102 125 'log-level' : 'Application.log_level',
103 126 })
104 127 flags = Dict(dict(
105 128 debug = ({'Application' : {'log_level' : 0}},
106 129 "Set Application.log_level to 0, maximizing log output."
107 130 )
108 131 ))
109 132
110 133 ipython_dir = Unicode(get_ipython_dir(), config=True,
111 134 help="""
112 135 The name of the IPython directory. This directory is used for logging
113 136 configuration (through profiles), history storage, etc. The default
114 137 is usually $HOME/.ipython. This option can also be specified through
115 138 the environment variable IPYTHON_DIR.
116 139 """
117 140 )
118 141
119 def _list_profiles_in(self, path):
120 """list profiles in a given root directory"""
121 files = os.listdir(path)
122 profiles = []
123 for f in files:
124 full_path = os.path.join(path, f)
125 if os.path.isdir(full_path) and f.startswith('profile_'):
126 profiles.append(f.split('_',1)[-1])
127 return profiles
128
129 def _list_bundled_profiles(self):
130 """list profiles in a given root directory"""
131 path = os.path.join(get_ipython_package_dir(), u'config', u'profile')
132 files = os.listdir(path)
133 profiles = []
134 for profile in files:
135 full_path = os.path.join(path, profile)
136 if os.path.isdir(full_path):
137 profiles.append(profile)
138 return profiles
139 142
140 143 def _print_profiles(self, profiles):
141 144 """print list of profiles, indented."""
142 145 for profile in profiles:
143 146 print ' %s' % profile
144 147
145 148 def list_profile_dirs(self):
146 profiles = self._list_bundled_profiles()
149 profiles = list_bundled_profiles()
147 150 if profiles:
148 151 print
149 152 print "Available profiles in IPython:"
150 153 self._print_profiles(profiles)
151 154 print
152 155 print " The first request for a bundled profile will copy it"
153 156 print " into your IPython directory (%s)," % self.ipython_dir
154 157 print " where you can customize it."
155 158
156 profiles = self._list_profiles_in(self.ipython_dir)
159 profiles = list_profiles_in(self.ipython_dir)
157 160 if profiles:
158 161 print
159 162 print "Available profiles in %s:" % self.ipython_dir
160 163 self._print_profiles(profiles)
161 164
162 profiles = self._list_profiles_in(os.getcwdu())
165 profiles = list_profiles_in(os.getcwdu())
163 166 if profiles:
164 167 print
165 168 print "Available profiles in current directory (%s):" % os.getcwdu()
166 169 self._print_profiles(profiles)
167 170
168 171 print
169 172 print "To use any of the above profiles, start IPython with:"
170 173 print " ipython --profile=<name>"
171 174 print
172 175
173 176 def start(self):
174 177 self.list_profile_dirs()
175 178
176 179
177 180 create_flags = {}
178 181 create_flags.update(base_flags)
179 182 # don't include '--init' flag, which implies running profile create in other apps
180 183 create_flags.pop('init')
181 184 create_flags['reset'] = ({'ProfileCreate': {'overwrite' : True}},
182 185 "reset config files in this profile to the defaults.")
183 186 create_flags['parallel'] = ({'ProfileCreate': {'parallel' : True}},
184 187 "Include the config files for parallel "
185 188 "computing apps (ipengine, ipcontroller, etc.)")
186 189
187 190
188 191 class ProfileCreate(BaseIPythonApplication):
189 192 name = u'ipython-profile'
190 193 description = create_help
191 194 examples = _create_examples
192 195 auto_create = Bool(True, config=False)
193 196
194 197 def _copy_config_files_default(self):
195 198 return True
196 199
197 200 parallel = Bool(False, config=True,
198 201 help="whether to include parallel computing config files")
199 202 def _parallel_changed(self, name, old, new):
200 203 parallel_files = [ 'ipcontroller_config.py',
201 204 'ipengine_config.py',
202 205 'ipcluster_config.py'
203 206 ]
204 207 if new:
205 208 for cf in parallel_files:
206 209 self.config_files.append(cf)
207 210 else:
208 211 for cf in parallel_files:
209 212 if cf in self.config_files:
210 213 self.config_files.remove(cf)
211 214
212 215 def parse_command_line(self, argv):
213 216 super(ProfileCreate, self).parse_command_line(argv)
214 217 # accept positional arg as profile name
215 218 if self.extra_args:
216 219 self.profile = self.extra_args[0]
217 220
218 221 flags = Dict(create_flags)
219 222
220 223 classes = [ProfileDir]
221 224
222 225 def init_config_files(self):
223 226 super(ProfileCreate, self).init_config_files()
224 227 # use local imports, since these classes may import from here
225 228 from IPython.frontend.terminal.ipapp import TerminalIPythonApp
226 229 apps = [TerminalIPythonApp]
227 230 try:
228 231 from IPython.frontend.qt.console.qtconsoleapp import IPythonQtConsoleApp
229 232 except Exception:
230 233 # this should be ImportError, but under weird circumstances
231 234 # this might be an AttributeError, or possibly others
232 235 # in any case, nothing should cause the profile creation to crash.
233 236 pass
234 237 else:
235 238 apps.append(IPythonQtConsoleApp)
236 239 try:
237 240 from IPython.frontend.html.notebook.notebookapp import NotebookApp
238 241 except ImportError:
239 242 pass
240 243 except Exception:
241 244 self.log.debug('Unexpected error when importing NotebookApp',
242 245 exc_info=True
243 246 )
244 247 else:
245 248 apps.append(NotebookApp)
246 249 if self.parallel:
247 250 from IPython.parallel.apps.ipcontrollerapp import IPControllerApp
248 251 from IPython.parallel.apps.ipengineapp import IPEngineApp
249 252 from IPython.parallel.apps.ipclusterapp import IPClusterStart
250 253 from IPython.parallel.apps.iploggerapp import IPLoggerApp
251 254 apps.extend([
252 255 IPControllerApp,
253 256 IPEngineApp,
254 257 IPClusterStart,
255 258 IPLoggerApp,
256 259 ])
257 260 for App in apps:
258 261 app = App()
259 262 app.config.update(self.config)
260 263 app.log = self.log
261 264 app.overwrite = self.overwrite
262 265 app.copy_config_files=True
263 266 app.profile = self.profile
264 267 app.init_profile_dir()
265 268 app.init_config_files()
266 269
267 270 def stage_default_config_file(self):
268 271 pass
269 272
270 273
271 274 class ProfileApp(Application):
272 275 name = u'ipython-profile'
273 276 description = profile_help
274 277 examples = _main_examples
275 278
276 279 subcommands = Dict(dict(
277 280 create = (ProfileCreate, "Create a new profile dir with default config files"),
278 281 list = (ProfileList, "List existing profiles")
279 282 ))
280 283
281 284 def start(self):
282 285 if self.subapp is None:
283 286 print "No subcommand specified. Must specify one of: %s"%(self.subcommands.keys())
284 287 print
285 288 self.print_description()
286 289 self.print_subcommands()
287 290 self.exit(1)
288 291 else:
289 292 return self.subapp.start()
290 293
@@ -1,694 +1,728 b''
1 1 """Tornado handlers for the notebook.
2 2
3 3 Authors:
4 4
5 5 * Brian Granger
6 6 """
7 7
8 8 #-----------------------------------------------------------------------------
9 9 # Copyright (C) 2008-2011 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 import logging
20 20 import Cookie
21 21 import time
22 22 import uuid
23 23
24 24 from tornado import web
25 25 from tornado import websocket
26 26
27 27 from zmq.eventloop import ioloop
28 28 from zmq.utils import jsonapi
29 29
30 30 from IPython.external.decorator import decorator
31 31 from IPython.zmq.session import Session
32 32 from IPython.lib.security import passwd_check
33 33
34 34 try:
35 35 from docutils.core import publish_string
36 36 except ImportError:
37 37 publish_string = None
38 38
39 39 #-----------------------------------------------------------------------------
40 40 # Monkeypatch for Tornado <= 2.1.1 - Remove when no longer necessary!
41 41 #-----------------------------------------------------------------------------
42 42
43 43 # Google Chrome, as of release 16, changed its websocket protocol number. The
44 44 # parts tornado cares about haven't really changed, so it's OK to continue
45 45 # accepting Chrome connections, but as of Tornado 2.1.1 (the currently released
46 46 # version as of Oct 30/2011) the version check fails, see the issue report:
47 47
48 48 # https://github.com/facebook/tornado/issues/385
49 49
50 50 # This issue has been fixed in Tornado post 2.1.1:
51 51
52 52 # https://github.com/facebook/tornado/commit/84d7b458f956727c3b0d6710
53 53
54 54 # Here we manually apply the same patch as above so that users of IPython can
55 55 # continue to work with an officially released Tornado. We make the
56 56 # monkeypatch version check as narrow as possible to limit its effects; once
57 57 # Tornado 2.1.1 is no longer found in the wild we'll delete this code.
58 58
59 59 import tornado
60 60
61 61 if tornado.version_info <= (2,1,1):
62 62
63 63 def _execute(self, transforms, *args, **kwargs):
64 64 from tornado.websocket import WebSocketProtocol8, WebSocketProtocol76
65 65
66 66 self.open_args = args
67 67 self.open_kwargs = kwargs
68 68
69 69 # The difference between version 8 and 13 is that in 8 the
70 70 # client sends a "Sec-Websocket-Origin" header and in 13 it's
71 71 # simply "Origin".
72 72 if self.request.headers.get("Sec-WebSocket-Version") in ("7", "8", "13"):
73 73 self.ws_connection = WebSocketProtocol8(self)
74 74 self.ws_connection.accept_connection()
75 75
76 76 elif self.request.headers.get("Sec-WebSocket-Version"):
77 77 self.stream.write(tornado.escape.utf8(
78 78 "HTTP/1.1 426 Upgrade Required\r\n"
79 79 "Sec-WebSocket-Version: 8\r\n\r\n"))
80 80 self.stream.close()
81 81
82 82 else:
83 83 self.ws_connection = WebSocketProtocol76(self)
84 84 self.ws_connection.accept_connection()
85 85
86 86 websocket.WebSocketHandler._execute = _execute
87 87 del _execute
88 88
89 89 #-----------------------------------------------------------------------------
90 90 # Decorator for disabling read-only handlers
91 91 #-----------------------------------------------------------------------------
92 92
93 93 @decorator
94 94 def not_if_readonly(f, self, *args, **kwargs):
95 95 if self.application.read_only:
96 96 raise web.HTTPError(403, "Notebook server is read-only")
97 97 else:
98 98 return f(self, *args, **kwargs)
99 99
100 100 @decorator
101 101 def authenticate_unless_readonly(f, self, *args, **kwargs):
102 102 """authenticate this page *unless* readonly view is active.
103 103
104 104 In read-only mode, the notebook list and print view should
105 105 be accessible without authentication.
106 106 """
107 107
108 108 @web.authenticated
109 109 def auth_f(self, *args, **kwargs):
110 110 return f(self, *args, **kwargs)
111 111
112 112 if self.application.read_only:
113 113 return f(self, *args, **kwargs)
114 114 else:
115 115 return auth_f(self, *args, **kwargs)
116 116
117 117 #-----------------------------------------------------------------------------
118 118 # Top-level handlers
119 119 #-----------------------------------------------------------------------------
120 120
121 121 class RequestHandler(web.RequestHandler):
122 122 """RequestHandler with default variable setting."""
123 123
124 124 def render(*args, **kwargs):
125 125 kwargs.setdefault('message', '')
126 126 return web.RequestHandler.render(*args, **kwargs)
127 127
128 128 class AuthenticatedHandler(RequestHandler):
129 129 """A RequestHandler with an authenticated user."""
130 130
131 131 def get_current_user(self):
132 132 user_id = self.get_secure_cookie("username")
133 133 # For now the user_id should not come back empty, but it could eventually
134 134 if user_id == '':
135 135 user_id = 'anonymous'
136 136 if user_id is None:
137 137 # prevent extra Invalid cookie sig warnings:
138 138 self.clear_cookie('username')
139 139 if not self.application.password and not self.application.read_only:
140 140 user_id = 'anonymous'
141 141 return user_id
142 142
143 143 @property
144 144 def logged_in(self):
145 145 """Is a user currently logged in?
146 146
147 147 """
148 148 user = self.get_current_user()
149 149 return (user and not user == 'anonymous')
150 150
151 151 @property
152 152 def login_available(self):
153 153 """May a user proceed to log in?
154 154
155 155 This returns True if login capability is available, irrespective of
156 156 whether the user is already logged in or not.
157 157
158 158 """
159 159 return bool(self.application.password)
160 160
161 161 @property
162 162 def read_only(self):
163 163 """Is the notebook read-only?
164 164
165 165 """
166 166 return self.application.read_only
167 167
168 168 @property
169 169 def ws_url(self):
170 170 """websocket url matching the current request
171 171
172 172 turns http[s]://host[:port] into
173 173 ws[s]://host[:port]
174 174 """
175 175 proto = self.request.protocol.replace('http', 'ws')
176 176 host = self.application.ipython_app.websocket_host # default to config value
177 177 if host == '':
178 178 host = self.request.host # get from request
179 179 return "%s://%s" % (proto, host)
180 180
181 181
182 182 class AuthenticatedFileHandler(AuthenticatedHandler, web.StaticFileHandler):
183 183 """static files should only be accessible when logged in"""
184 184
185 185 @authenticate_unless_readonly
186 186 def get(self, path):
187 187 return web.StaticFileHandler.get(self, path)
188 188
189 189
190 190 class ProjectDashboardHandler(AuthenticatedHandler):
191 191
192 192 @authenticate_unless_readonly
193 193 def get(self):
194 194 nbm = self.application.notebook_manager
195 195 project = nbm.notebook_dir
196 196 self.render(
197 197 'projectdashboard.html', project=project,
198 198 base_project_url=self.application.ipython_app.base_project_url,
199 199 base_kernel_url=self.application.ipython_app.base_kernel_url,
200 200 read_only=self.read_only,
201 201 logged_in=self.logged_in,
202 202 login_available=self.login_available
203 203 )
204 204
205 205
206 206 class LoginHandler(AuthenticatedHandler):
207 207
208 208 def _render(self, message=None):
209 209 self.render('login.html',
210 210 next=self.get_argument('next', default='/'),
211 211 read_only=self.read_only,
212 212 logged_in=self.logged_in,
213 213 login_available=self.login_available,
214 214 message=message
215 215 )
216 216
217 217 def get(self):
218 218 if self.current_user:
219 219 self.redirect(self.get_argument('next', default='/'))
220 220 else:
221 221 self._render()
222 222
223 223 def post(self):
224 224 pwd = self.get_argument('password', default=u'')
225 225 if self.application.password:
226 226 if passwd_check(self.application.password, pwd):
227 227 self.set_secure_cookie('username', str(uuid.uuid4()))
228 228 else:
229 229 self._render(message={'error': 'Invalid password'})
230 230 return
231 231
232 232 self.redirect(self.get_argument('next', default='/'))
233 233
234 234
235 235 class LogoutHandler(AuthenticatedHandler):
236 236
237 237 def get(self):
238 238 self.clear_cookie('username')
239 239 if self.login_available:
240 240 message = {'info': 'Successfully logged out.'}
241 241 else:
242 242 message = {'warning': 'Cannot log out. Notebook authentication '
243 243 'is disabled.'}
244 244
245 245 self.render('logout.html',
246 246 read_only=self.read_only,
247 247 logged_in=self.logged_in,
248 248 login_available=self.login_available,
249 249 message=message)
250 250
251 251
252 252 class NewHandler(AuthenticatedHandler):
253 253
254 254 @web.authenticated
255 255 def get(self):
256 256 nbm = self.application.notebook_manager
257 257 project = nbm.notebook_dir
258 258 notebook_id = nbm.new_notebook()
259 259 self.render(
260 260 'notebook.html', project=project,
261 261 notebook_id=notebook_id,
262 262 base_project_url=self.application.ipython_app.base_project_url,
263 263 base_kernel_url=self.application.ipython_app.base_kernel_url,
264 264 kill_kernel=False,
265 265 read_only=False,
266 266 logged_in=self.logged_in,
267 267 login_available=self.login_available,
268 268 mathjax_url=self.application.ipython_app.mathjax_url,
269 269 )
270 270
271 271
272 272 class NamedNotebookHandler(AuthenticatedHandler):
273 273
274 274 @authenticate_unless_readonly
275 275 def get(self, notebook_id):
276 276 nbm = self.application.notebook_manager
277 277 project = nbm.notebook_dir
278 278 if not nbm.notebook_exists(notebook_id):
279 279 raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
280 280
281 281 self.render(
282 282 'notebook.html', project=project,
283 283 notebook_id=notebook_id,
284 284 base_project_url=self.application.ipython_app.base_project_url,
285 285 base_kernel_url=self.application.ipython_app.base_kernel_url,
286 286 kill_kernel=False,
287 287 read_only=self.read_only,
288 288 logged_in=self.logged_in,
289 289 login_available=self.login_available,
290 290 mathjax_url=self.application.ipython_app.mathjax_url,
291 291 )
292 292
293 293
294 294 class PrintNotebookHandler(AuthenticatedHandler):
295 295
296 296 @authenticate_unless_readonly
297 297 def get(self, notebook_id):
298 298 nbm = self.application.notebook_manager
299 299 project = nbm.notebook_dir
300 300 if not nbm.notebook_exists(notebook_id):
301 301 raise web.HTTPError(404, u'Notebook does not exist: %s' % notebook_id)
302 302
303 303 self.render(
304 304 'printnotebook.html', project=project,
305 305 notebook_id=notebook_id,
306 306 base_project_url=self.application.ipython_app.base_project_url,
307 307 base_kernel_url=self.application.ipython_app.base_kernel_url,
308 308 kill_kernel=False,
309 309 read_only=self.read_only,
310 310 logged_in=self.logged_in,
311 311 login_available=self.login_available,
312 312 mathjax_url=self.application.ipython_app.mathjax_url,
313 313 )
314 314
315 315 #-----------------------------------------------------------------------------
316 316 # Kernel handlers
317 317 #-----------------------------------------------------------------------------
318 318
319 319
320 320 class MainKernelHandler(AuthenticatedHandler):
321 321
322 322 @web.authenticated
323 323 def get(self):
324 324 km = self.application.kernel_manager
325 325 self.finish(jsonapi.dumps(km.kernel_ids))
326 326
327 327 @web.authenticated
328 328 def post(self):
329 329 km = self.application.kernel_manager
330 330 notebook_id = self.get_argument('notebook', default=None)
331 331 kernel_id = km.start_kernel(notebook_id)
332 332 data = {'ws_url':self.ws_url,'kernel_id':kernel_id}
333 333 self.set_header('Location', '/'+kernel_id)
334 334 self.finish(jsonapi.dumps(data))
335 335
336 336
337 337 class KernelHandler(AuthenticatedHandler):
338 338
339 339 SUPPORTED_METHODS = ('DELETE')
340 340
341 341 @web.authenticated
342 342 def delete(self, kernel_id):
343 343 km = self.application.kernel_manager
344 344 km.kill_kernel(kernel_id)
345 345 self.set_status(204)
346 346 self.finish()
347 347
348 348
349 349 class KernelActionHandler(AuthenticatedHandler):
350 350
351 351 @web.authenticated
352 352 def post(self, kernel_id, action):
353 353 km = self.application.kernel_manager
354 354 if action == 'interrupt':
355 355 km.interrupt_kernel(kernel_id)
356 356 self.set_status(204)
357 357 if action == 'restart':
358 358 new_kernel_id = km.restart_kernel(kernel_id)
359 359 data = {'ws_url':self.ws_url,'kernel_id':new_kernel_id}
360 360 self.set_header('Location', '/'+new_kernel_id)
361 361 self.write(jsonapi.dumps(data))
362 362 self.finish()
363 363
364 364
365 365 class ZMQStreamHandler(websocket.WebSocketHandler):
366 366
367 367 def _reserialize_reply(self, msg_list):
368 368 """Reserialize a reply message using JSON.
369 369
370 370 This takes the msg list from the ZMQ socket, unserializes it using
371 371 self.session and then serializes the result using JSON. This method
372 372 should be used by self._on_zmq_reply to build messages that can
373 373 be sent back to the browser.
374 374 """
375 375 idents, msg_list = self.session.feed_identities(msg_list)
376 376 msg = self.session.unserialize(msg_list)
377 377 try:
378 378 msg['header'].pop('date')
379 379 except KeyError:
380 380 pass
381 381 try:
382 382 msg['parent_header'].pop('date')
383 383 except KeyError:
384 384 pass
385 385 msg.pop('buffers')
386 386 return jsonapi.dumps(msg)
387 387
388 388 def _on_zmq_reply(self, msg_list):
389 389 try:
390 390 msg = self._reserialize_reply(msg_list)
391 391 except:
392 392 self.application.log.critical("Malformed message: %r" % msg_list)
393 393 else:
394 394 self.write_message(msg)
395 395
396 396 def allow_draft76(self):
397 397 """Allow draft 76, until browsers such as Safari update to RFC 6455.
398 398
399 399 This has been disabled by default in tornado in release 2.2.0, and
400 400 support will be removed in later versions.
401 401 """
402 402 return True
403 403
404 404
405 405 class AuthenticatedZMQStreamHandler(ZMQStreamHandler):
406 406
407 407 def open(self, kernel_id):
408 408 self.kernel_id = kernel_id.decode('ascii')
409 409 try:
410 410 cfg = self.application.ipython_app.config
411 411 except AttributeError:
412 412 # protect from the case where this is run from something other than
413 413 # the notebook app:
414 414 cfg = None
415 415 self.session = Session(config=cfg)
416 416 self.save_on_message = self.on_message
417 417 self.on_message = self.on_first_message
418 418
419 419 def get_current_user(self):
420 420 user_id = self.get_secure_cookie("username")
421 421 if user_id == '' or (user_id is None and not self.application.password):
422 422 user_id = 'anonymous'
423 423 return user_id
424 424
425 425 def _inject_cookie_message(self, msg):
426 426 """Inject the first message, which is the document cookie,
427 427 for authentication."""
428 428 if isinstance(msg, unicode):
429 429 # the Cookie constructor doesn't accept unicode strings for some reason
430 430 msg = msg.encode('utf8', 'replace')
431 431 try:
432 432 self.request._cookies = Cookie.SimpleCookie(msg)
433 433 except:
434 434 logging.warn("couldn't parse cookie string: %s",msg, exc_info=True)
435 435
436 436 def on_first_message(self, msg):
437 437 self._inject_cookie_message(msg)
438 438 if self.get_current_user() is None:
439 439 logging.warn("Couldn't authenticate WebSocket connection")
440 440 raise web.HTTPError(403)
441 441 self.on_message = self.save_on_message
442 442
443 443
444 444 class IOPubHandler(AuthenticatedZMQStreamHandler):
445 445
446 446 def initialize(self, *args, **kwargs):
447 447 self._kernel_alive = True
448 448 self._beating = False
449 449 self.iopub_stream = None
450 450 self.hb_stream = None
451 451
452 452 def on_first_message(self, msg):
453 453 try:
454 454 super(IOPubHandler, self).on_first_message(msg)
455 455 except web.HTTPError:
456 456 self.close()
457 457 return
458 458 km = self.application.kernel_manager
459 459 self.time_to_dead = km.time_to_dead
460 460 self.first_beat = km.first_beat
461 461 kernel_id = self.kernel_id
462 462 try:
463 463 self.iopub_stream = km.create_iopub_stream(kernel_id)
464 464 self.hb_stream = km.create_hb_stream(kernel_id)
465 465 except web.HTTPError:
466 466 # WebSockets don't respond to traditional error codes so we
467 467 # close the connection.
468 468 if not self.stream.closed():
469 469 self.stream.close()
470 470 self.close()
471 471 else:
472 472 self.iopub_stream.on_recv(self._on_zmq_reply)
473 473 self.start_hb(self.kernel_died)
474 474
475 475 def on_message(self, msg):
476 476 pass
477 477
478 478 def on_close(self):
479 479 # This method can be called twice, once by self.kernel_died and once
480 480 # from the WebSocket close event. If the WebSocket connection is
481 481 # closed before the ZMQ streams are set up, they could be None.
482 482 self.stop_hb()
483 483 if self.iopub_stream is not None and not self.iopub_stream.closed():
484 484 self.iopub_stream.on_recv(None)
485 485 self.iopub_stream.close()
486 486 if self.hb_stream is not None and not self.hb_stream.closed():
487 487 self.hb_stream.close()
488 488
489 489 def start_hb(self, callback):
490 490 """Start the heartbeating and call the callback if the kernel dies."""
491 491 if not self._beating:
492 492 self._kernel_alive = True
493 493
494 494 def ping_or_dead():
495 495 self.hb_stream.flush()
496 496 if self._kernel_alive:
497 497 self._kernel_alive = False
498 498 self.hb_stream.send(b'ping')
499 499 # flush stream to force immediate socket send
500 500 self.hb_stream.flush()
501 501 else:
502 502 try:
503 503 callback()
504 504 except:
505 505 pass
506 506 finally:
507 507 self.stop_hb()
508 508
509 509 def beat_received(msg):
510 510 self._kernel_alive = True
511 511
512 512 self.hb_stream.on_recv(beat_received)
513 513 loop = ioloop.IOLoop.instance()
514 514 self._hb_periodic_callback = ioloop.PeriodicCallback(ping_or_dead, self.time_to_dead*1000, loop)
515 515 loop.add_timeout(time.time()+self.first_beat, self._really_start_hb)
516 516 self._beating = True
517 517
518 518 def _really_start_hb(self):
519 519 """callback for delayed heartbeat start
520 520
521 521 Only start the hb loop if we haven't been closed during the wait.
522 522 """
523 523 if self._beating and not self.hb_stream.closed():
524 524 self._hb_periodic_callback.start()
525 525
526 526 def stop_hb(self):
527 527 """Stop the heartbeating and cancel all related callbacks."""
528 528 if self._beating:
529 529 self._beating = False
530 530 self._hb_periodic_callback.stop()
531 531 if not self.hb_stream.closed():
532 532 self.hb_stream.on_recv(None)
533 533
534 534 def kernel_died(self):
535 535 self.application.kernel_manager.delete_mapping_for_kernel(self.kernel_id)
536 536 self.application.log.error("Kernel %s failed to respond to heartbeat", self.kernel_id)
537 537 self.write_message(
538 538 {'header': {'msg_type': 'status'},
539 539 'parent_header': {},
540 540 'content': {'execution_state':'dead'}
541 541 }
542 542 )
543 543 self.on_close()
544 544
545 545
546 546 class ShellHandler(AuthenticatedZMQStreamHandler):
547 547
548 548 def initialize(self, *args, **kwargs):
549 549 self.shell_stream = None
550 550
551 551 def on_first_message(self, msg):
552 552 try:
553 553 super(ShellHandler, self).on_first_message(msg)
554 554 except web.HTTPError:
555 555 self.close()
556 556 return
557 557 km = self.application.kernel_manager
558 558 self.max_msg_size = km.max_msg_size
559 559 kernel_id = self.kernel_id
560 560 try:
561 561 self.shell_stream = km.create_shell_stream(kernel_id)
562 562 except web.HTTPError:
563 563 # WebSockets don't respond to traditional error codes so we
564 564 # close the connection.
565 565 if not self.stream.closed():
566 566 self.stream.close()
567 567 self.close()
568 568 else:
569 569 self.shell_stream.on_recv(self._on_zmq_reply)
570 570
571 571 def on_message(self, msg):
572 572 if len(msg) < self.max_msg_size:
573 573 msg = jsonapi.loads(msg)
574 574 self.session.send(self.shell_stream, msg)
575 575
576 576 def on_close(self):
577 577 # Make sure the stream exists and is not already closed.
578 578 if self.shell_stream is not None and not self.shell_stream.closed():
579 579 self.shell_stream.close()
580 580
581 581
582 582 #-----------------------------------------------------------------------------
583 583 # Notebook web service handlers
584 584 #-----------------------------------------------------------------------------
585 585
586 586 class NotebookRootHandler(AuthenticatedHandler):
587 587
588 588 @authenticate_unless_readonly
589 589 def get(self):
590
591 590 nbm = self.application.notebook_manager
592 591 files = nbm.list_notebooks()
593 592 self.finish(jsonapi.dumps(files))
594 593
595 594 @web.authenticated
596 595 def post(self):
597 596 nbm = self.application.notebook_manager
598 597 body = self.request.body.strip()
599 598 format = self.get_argument('format', default='json')
600 599 name = self.get_argument('name', default=None)
601 600 if body:
602 601 notebook_id = nbm.save_new_notebook(body, name=name, format=format)
603 602 else:
604 603 notebook_id = nbm.new_notebook()
605 604 self.set_header('Location', '/'+notebook_id)
606 605 self.finish(jsonapi.dumps(notebook_id))
607 606
608 607
609 608 class NotebookHandler(AuthenticatedHandler):
610 609
611 610 SUPPORTED_METHODS = ('GET', 'PUT', 'DELETE')
612 611
613 612 @authenticate_unless_readonly
614 613 def get(self, notebook_id):
615 614 nbm = self.application.notebook_manager
616 615 format = self.get_argument('format', default='json')
617 616 last_mod, name, data = nbm.get_notebook(notebook_id, format)
618 617
619 618 if format == u'json':
620 619 self.set_header('Content-Type', 'application/json')
621 620 self.set_header('Content-Disposition','attachment; filename="%s.ipynb"' % name)
622 621 elif format == u'py':
623 622 self.set_header('Content-Type', 'application/x-python')
624 623 self.set_header('Content-Disposition','attachment; filename="%s.py"' % name)
625 624 self.set_header('Last-Modified', last_mod)
626 625 self.finish(data)
627 626
628 627 @web.authenticated
629 628 def put(self, notebook_id):
630 629 nbm = self.application.notebook_manager
631 630 format = self.get_argument('format', default='json')
632 631 name = self.get_argument('name', default=None)
633 632 nbm.save_notebook(notebook_id, self.request.body, name=name, format=format)
634 633 self.set_status(204)
635 634 self.finish()
636 635
637 636 @web.authenticated
638 637 def delete(self, notebook_id):
639 638 nbm = self.application.notebook_manager
640 639 nbm.delete_notebook(notebook_id)
641 640 self.set_status(204)
642 641 self.finish()
643 642
644 643
645 644 class NotebookCopyHandler(AuthenticatedHandler):
646 645
647 646 @web.authenticated
648 647 def get(self, notebook_id):
649 648 nbm = self.application.notebook_manager
650 649 project = nbm.notebook_dir
651 650 notebook_id = nbm.copy_notebook(notebook_id)
652 651 self.render(
653 652 'notebook.html', project=project,
654 653 notebook_id=notebook_id,
655 654 base_project_url=self.application.ipython_app.base_project_url,
656 655 base_kernel_url=self.application.ipython_app.base_kernel_url,
657 656 kill_kernel=False,
658 657 read_only=False,
659 658 logged_in=self.logged_in,
660 659 login_available=self.login_available,
661 660 mathjax_url=self.application.ipython_app.mathjax_url,
662 661 )
663 662
663
664 #-----------------------------------------------------------------------------
665 # Cluster handlers
666 #-----------------------------------------------------------------------------
667
668
669 class MainClusterHandler(AuthenticatedHandler):
670
671 @web.authenticated
672 def get(self):
673 cm = self.application.cluster_manager
674 self.finish(jsonapi.dumps(cm.list_profiles()))
675
676
677 class ClusterProfileHandler(AuthenticatedHandler):
678
679 @web.authenticated
680 def get(self, profile):
681 cm = self.application.cluster_manager
682 self.finish(jsonapi.dumps(cm.profile_info(profile)))
683
684
685 class ClusterActionHandler(AuthenticatedHandler):
686
687 @web.authenticated
688 def post(self, profile, action):
689 cm = self.application.cluster_manager
690 if action == 'start':
691 n = int(self.get_argument('n', default=4))
692 cm.start_cluster(profile, n)
693 if action == 'stop':
694 cm.stop_cluster(profile)
695 self.finish()
696
697
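Together with the /clusters routes registered in notebookapp.py further down in this changeset, these handlers form a small REST service. A hedged sketch of exercising it from Python 2, assuming a local notebook server on the default port, no password set (so the anonymous user passes @web.authenticated), and an existing profile named 'default':

    import json
    import urllib2

    base = 'http://127.0.0.1:8888'

    # GET /clusters -> list of {'profile': ..., 'status': ...} dicts
    print json.loads(urllib2.urlopen(base + '/clusters').read())

    # POST /clusters/<profile>/start with an engine count, inspect it, then stop it
    urllib2.urlopen(base + '/clusters/default/start', data='n=3')
    print json.loads(urllib2.urlopen(base + '/clusters/default').read())
    urllib2.urlopen(base + '/clusters/default/stop', data='')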
664 698 #-----------------------------------------------------------------------------
665 699 # RST web service handlers
666 700 #-----------------------------------------------------------------------------
667 701
668 702
669 703 class RSTHandler(AuthenticatedHandler):
670 704
671 705 @web.authenticated
672 706 def post(self):
673 707 if publish_string is None:
674 708 raise web.HTTPError(503, u'docutils not available')
675 709 body = self.request.body.strip()
676 710 source = body
677 711 # template_path=os.path.join(os.path.dirname(__file__), u'templates', u'rst_template.html')
678 712 defaults = {'file_insertion_enabled': 0,
679 713 'raw_enabled': 0,
680 714 '_disable_config': 1,
681 715 'stylesheet_path': 0
682 716 # 'template': template_path
683 717 }
684 718 try:
685 719 html = publish_string(source, writer_name='html',
686 720 settings_overrides=defaults
687 721 )
688 722 except:
689 723 raise web.HTTPError(400, u'Invalid RST')
690 724 print html
691 725 self.set_header('Content-Type', 'text/html')
692 726 self.finish(html)
693 727
694 728
@@ -1,491 +1,503 b''
1 1 # coding: utf-8
2 2 """A tornado based IPython notebook server.
3 3
4 4 Authors:
5 5
6 6 * Brian Granger
7 7 """
8 8 #-----------------------------------------------------------------------------
9 9 # Copyright (C) 2008-2011 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 # stdlib
20 20 import errno
21 21 import logging
22 22 import os
23 23 import signal
24 24 import socket
25 25 import sys
26 26 import threading
27 27 import webbrowser
28 28
29 29 # Third party
30 30 import zmq
31 31
32 32 # Install the pyzmq ioloop. This has to be done before anything else from
33 33 # tornado is imported.
34 34 from zmq.eventloop import ioloop
35 35 # FIXME: ioloop.install is new in pyzmq-2.1.7, so remove this conditional
36 36 # when pyzmq dependency is updated beyond that.
37 37 if hasattr(ioloop, 'install'):
38 38 ioloop.install()
39 39 else:
40 40 import tornado.ioloop
41 41 tornado.ioloop.IOLoop = ioloop.IOLoop
42 42
43 43 from tornado import httpserver
44 44 from tornado import web
45 45
46 46 # Our own libraries
47 47 from .kernelmanager import MappingKernelManager
48 48 from .handlers import (LoginHandler, LogoutHandler,
49 49 ProjectDashboardHandler, NewHandler, NamedNotebookHandler,
50 50 MainKernelHandler, KernelHandler, KernelActionHandler, IOPubHandler,
51 51 ShellHandler, NotebookRootHandler, NotebookHandler, NotebookCopyHandler,
52 RSTHandler, AuthenticatedFileHandler, PrintNotebookHandler
52 RSTHandler, AuthenticatedFileHandler, PrintNotebookHandler,
53 MainClusterHandler, ClusterProfileHandler, ClusterActionHandler
53 54 )
54 55 from .notebookmanager import NotebookManager
56 from .clustermanager import ClusterManager
55 57
56 58 from IPython.config.application import catch_config_error, boolean_flag
57 59 from IPython.core.application import BaseIPythonApplication
58 60 from IPython.core.profiledir import ProfileDir
59 61 from IPython.lib.kernel import swallow_argv
60 62 from IPython.zmq.session import Session, default_secure
61 63 from IPython.zmq.zmqshell import ZMQInteractiveShell
62 64 from IPython.zmq.ipkernel import (
63 65 flags as ipkernel_flags,
64 66 aliases as ipkernel_aliases,
65 67 IPKernelApp
66 68 )
67 69 from IPython.utils.traitlets import Dict, Unicode, Integer, List, Enum, Bool
68 70 from IPython.utils import py3compat
69 71
70 72 #-----------------------------------------------------------------------------
71 73 # Module globals
72 74 #-----------------------------------------------------------------------------
73 75
74 76 _kernel_id_regex = r"(?P<kernel_id>\w+-\w+-\w+-\w+-\w+)"
75 77 _kernel_action_regex = r"(?P<action>restart|interrupt)"
76 78 _notebook_id_regex = r"(?P<notebook_id>\w+-\w+-\w+-\w+-\w+)"
79 _profile_regex = r"(?P<profile>[a-zA-Z0-9]+)"
80 _cluster_action_regex = r"(?P<action>start|stop)"
81
77 82
78 83 LOCALHOST = '127.0.0.1'
79 84
80 85 _examples = """
81 86 ipython notebook # start the notebook
82 87 ipython notebook --profile=sympy # use the sympy profile
83 88 ipython notebook --pylab=inline # pylab in inline plotting mode
84 89 ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
85 90 ipython notebook --port=5555 --ip=* # Listen on port 5555, all interfaces
86 91 """
87 92
88 93 #-----------------------------------------------------------------------------
89 94 # Helper functions
90 95 #-----------------------------------------------------------------------------
91 96
92 97 def url_path_join(a,b):
93 98 if a.endswith('/') and b.startswith('/'):
94 99 return a[:-1]+b
95 100 else:
96 101 return a+b
97 102
98 103 #-----------------------------------------------------------------------------
99 104 # The Tornado web application
100 105 #-----------------------------------------------------------------------------
101 106
102 107 class NotebookWebApplication(web.Application):
103 108
104 def __init__(self, ipython_app, kernel_manager, notebook_manager, log,
109 def __init__(self, ipython_app, kernel_manager, notebook_manager,
110 cluster_manager, log,
105 111 base_project_url, settings_overrides):
106 112 handlers = [
107 113 (r"/", ProjectDashboardHandler),
108 114 (r"/login", LoginHandler),
109 115 (r"/logout", LogoutHandler),
110 116 (r"/new", NewHandler),
111 117 (r"/%s" % _notebook_id_regex, NamedNotebookHandler),
112 118 (r"/%s/copy" % _notebook_id_regex, NotebookCopyHandler),
113 119 (r"/%s/print" % _notebook_id_regex, PrintNotebookHandler),
114 120 (r"/kernels", MainKernelHandler),
115 121 (r"/kernels/%s" % _kernel_id_regex, KernelHandler),
116 122 (r"/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler),
117 123 (r"/kernels/%s/iopub" % _kernel_id_regex, IOPubHandler),
118 124 (r"/kernels/%s/shell" % _kernel_id_regex, ShellHandler),
119 125 (r"/notebooks", NotebookRootHandler),
120 126 (r"/notebooks/%s" % _notebook_id_regex, NotebookHandler),
121 127 (r"/rstservice/render", RSTHandler),
122 128 (r"/files/(.*)", AuthenticatedFileHandler, {'path' : notebook_manager.notebook_dir}),
129 (r"/clusters", MainClusterHandler),
130 (r"/clusters/%s/%s" % (_profile_regex, _cluster_action_regex), ClusterActionHandler),
131 (r"/clusters/%s" % _profile_regex, ClusterProfileHandler),
123 132 ]
124 133 settings = dict(
125 134 template_path=os.path.join(os.path.dirname(__file__), "templates"),
126 135 static_path=os.path.join(os.path.dirname(__file__), "static"),
127 136 cookie_secret=os.urandom(1024),
128 137 login_url="/login",
129 138 )
130 139
131 140 # allow custom overrides for the tornado web app.
132 141 settings.update(settings_overrides)
133 142
134 143 # Python < 2.6.5 doesn't accept unicode keys in f(**kwargs), and
135 144 # base_project_url will always be unicode, which will in turn
136 145 # make the patterns unicode, and ultimately result in unicode
137 146 # keys in kwargs to handler._execute(**kwargs) in tornado.
138 147 # This enforces that base_project_url be ascii in that situation.
139 148 #
140 149 # Note that the URLs these patterns check against are escaped,
141 150 # and thus guaranteed to be ASCII: 'hΓ©llo' is really 'h%C3%A9llo'.
142 151 base_project_url = py3compat.unicode_to_str(base_project_url, 'ascii')
143 152
144 153 # prepend base_project_url onto the patterns that we match
145 154 new_handlers = []
146 155 for handler in handlers:
147 156 pattern = url_path_join(base_project_url, handler[0])
148 157 new_handler = tuple([pattern]+list(handler[1:]))
149 158 new_handlers.append( new_handler )
150 159
151 160 super(NotebookWebApplication, self).__init__(new_handlers, **settings)
152 161
153 162 self.kernel_manager = kernel_manager
154 self.log = log
155 163 self.notebook_manager = notebook_manager
164 self.cluster_manager = cluster_manager
156 165 self.ipython_app = ipython_app
157 166 self.read_only = self.ipython_app.read_only
167 self.log = log
158 168
159 169
160 170 #-----------------------------------------------------------------------------
161 171 # Aliases and Flags
162 172 #-----------------------------------------------------------------------------
163 173
164 174 flags = dict(ipkernel_flags)
165 175 flags['no-browser']=(
166 176 {'NotebookApp' : {'open_browser' : False}},
167 177 "Don't open the notebook in a browser after startup."
168 178 )
169 179 flags['no-mathjax']=(
170 180 {'NotebookApp' : {'enable_mathjax' : False}},
171 181 """Disable MathJax
172 182
173 183 MathJax is the javascript library IPython uses to render math/LaTeX. It is
174 184 very large, so you may want to disable it if you have a slow internet
175 185 connection, or for offline use of the notebook.
176 186
177 187 When disabled, equations etc. will appear as their untransformed TeX source.
178 188 """
179 189 )
180 190 flags['read-only'] = (
181 191 {'NotebookApp' : {'read_only' : True}},
182 192 """Allow read-only access to notebooks.
183 193
184 194 When using a password to protect the notebook server, this flag
185 195 allows unauthenticated clients to view the notebook list, and
186 196 individual notebooks, but not edit them, start kernels, or run
187 197 code.
188 198
189 199 If no password is set, the server will be entirely read-only.
190 200 """
191 201 )
192 202
193 203 # Add notebook manager flags
194 204 flags.update(boolean_flag('script', 'NotebookManager.save_script',
195 205 'Auto-save a .py script every time the .ipynb notebook is saved',
196 206 'Do not auto-save .py scripts for every notebook'))
197 207
198 208 # the flags that are specific to the frontend
199 209 # these must be scrubbed before being passed to the kernel,
200 210 # or it will raise an error on unrecognized flags
201 211 notebook_flags = ['no-browser', 'no-mathjax', 'read-only', 'script', 'no-script']
202 212
203 213 aliases = dict(ipkernel_aliases)
204 214
205 215 aliases.update({
206 216 'ip': 'NotebookApp.ip',
207 217 'port': 'NotebookApp.port',
208 218 'keyfile': 'NotebookApp.keyfile',
209 219 'certfile': 'NotebookApp.certfile',
210 220 'notebook-dir': 'NotebookManager.notebook_dir',
211 221 'browser': 'NotebookApp.browser',
212 222 })
213 223
214 224 # remove ipkernel flags that are singletons, and don't make sense in
215 225 # multi-kernel environment:
216 226 aliases.pop('f', None)
217 227
218 228 notebook_aliases = [u'port', u'ip', u'keyfile', u'certfile',
219 229 u'notebook-dir']
220 230
221 231 #-----------------------------------------------------------------------------
222 232 # NotebookApp
223 233 #-----------------------------------------------------------------------------
224 234
225 235 class NotebookApp(BaseIPythonApplication):
226 236
227 237 name = 'ipython-notebook'
228 238 default_config_file_name='ipython_notebook_config.py'
229 239
230 240 description = """
231 241 The IPython HTML Notebook.
232 242
233 243 This launches a Tornado based HTML Notebook Server that serves up an
234 244 HTML5/Javascript Notebook client.
235 245 """
236 246 examples = _examples
237 247
238 248 classes = [IPKernelApp, ZMQInteractiveShell, ProfileDir, Session,
239 249 MappingKernelManager, NotebookManager]
240 250 flags = Dict(flags)
241 251 aliases = Dict(aliases)
242 252
243 253 kernel_argv = List(Unicode)
244 254
245 255 log_level = Enum((0,10,20,30,40,50,'DEBUG','INFO','WARN','ERROR','CRITICAL'),
246 256 default_value=logging.INFO,
247 257 config=True,
248 258 help="Set the log level by value or name.")
249 259
250 260 # create requested profiles by default, if they don't exist:
251 261 auto_create = Bool(True)
252 262
253 263 # Network related information.
254 264
255 265 ip = Unicode(LOCALHOST, config=True,
256 266 help="The IP address the notebook server will listen on."
257 267 )
258 268
259 269 def _ip_changed(self, name, old, new):
260 270 if new == u'*': self.ip = u''
261 271
262 272 port = Integer(8888, config=True,
263 273 help="The port the notebook server will listen on."
264 274 )
265 275
266 276 certfile = Unicode(u'', config=True,
267 277 help="""The full path to an SSL/TLS certificate file."""
268 278 )
269 279
270 280 keyfile = Unicode(u'', config=True,
271 281 help="""The full path to a private key file for usage with SSL/TLS."""
272 282 )
273 283
274 284 password = Unicode(u'', config=True,
275 285 help="""Hashed password to use for web authentication.
276 286
277 287 To generate, type in a python/IPython shell:
278 288
279 289 from IPython.lib import passwd; passwd()
280 290
281 291 The string should be of the form type:salt:hashed-password.
282 292 """
283 293 )
284 294
285 295 open_browser = Bool(True, config=True,
286 296 help="""Whether to open in a browser after starting.
287 297 The specific browser used is platform dependent and
288 298 determined by the python standard library `webbrowser`
289 299 module, unless it is overridden using the --browser
290 300 (NotebookApp.browser) configuration option.
291 301 """)
292 302
293 303 browser = Unicode(u'', config=True,
294 304 help="""Specify what command to use to invoke a web
295 305 browser when opening the notebook. If not specified, the
296 306 default browser will be determined by the `webbrowser`
297 307 standard library module, which allows setting of the
298 308 BROWSER environment variable to override it.
299 309 """)
300 310
301 311 read_only = Bool(False, config=True,
302 312 help="Whether to prevent editing/execution of notebooks."
303 313 )
304 314
305 315 webapp_settings = Dict(config=True,
306 316 help="Supply overrides for the tornado.web.Application that the "
307 317 "IPython notebook uses.")
308 318
309 319 enable_mathjax = Bool(True, config=True,
310 320 help="""Whether to enable MathJax for typesetting math/TeX
311 321
312 322 MathJax is the javascript library IPython uses to render math/LaTeX. It is
313 323 very large, so you may want to disable it if you have a slow internet
314 324 connection, or for offline use of the notebook.
315 325
316 326 When disabled, equations etc. will appear as their untransformed TeX source.
317 327 """
318 328 )
319 329 def _enable_mathjax_changed(self, name, old, new):
320 330 """set mathjax url to empty if mathjax is disabled"""
321 331 if not new:
322 332 self.mathjax_url = u''
323 333
324 334 base_project_url = Unicode('/', config=True,
325 335 help='''The base URL for the notebook server''')
326 336 base_kernel_url = Unicode('/', config=True,
327 337 help='''The base URL for the kernel server''')
328 338 websocket_host = Unicode("", config=True,
329 339 help="""The hostname for the websocket server."""
330 340 )
331 341
332 342 mathjax_url = Unicode("", config=True,
333 343 help="""The url for MathJax.js."""
334 344 )
335 345 def _mathjax_url_default(self):
336 346 if not self.enable_mathjax:
337 347 return u''
338 348 static_path = self.webapp_settings.get("static_path", os.path.join(os.path.dirname(__file__), "static"))
339 349 static_url_prefix = self.webapp_settings.get("static_url_prefix",
340 350 "/static/")
341 351 if os.path.exists(os.path.join(static_path, 'mathjax', "MathJax.js")):
342 352 self.log.info("Using local MathJax")
343 353 return static_url_prefix+u"mathjax/MathJax.js"
344 354 else:
345 355 self.log.info("Using MathJax from CDN")
346 356 hostname = "cdn.mathjax.org"
347 357 try:
348 358 # resolve mathjax cdn alias to cloudfront, because Amazon's SSL certificate
349 359 # only works on *.cloudfront.net
350 360 true_host, aliases, IPs = socket.gethostbyname_ex(hostname)
351 361 # I've run this on a few machines, and some put the right answer in true_host,
352 362 # while others gave it in the aliases list, so we check both.
353 363 aliases.insert(0, true_host)
354 364 except Exception:
355 365 self.log.warn("Couldn't determine MathJax CDN info")
356 366 else:
357 367 for alias in aliases:
358 368 parts = alias.split('.')
359 369 # want static foo.cloudfront.net, not dynamic foo.lax3.cloudfront.net
360 370 if len(parts) == 3 and alias.endswith(".cloudfront.net"):
361 371 hostname = alias
362 372 break
363 373
364 374 if not hostname.endswith(".cloudfront.net"):
365 375 self.log.error("Couldn't resolve CloudFront host, required for HTTPS MathJax.")
366 376 self.log.error("Loading from https://cdn.mathjax.org will probably fail due to invalid certificate.")
367 377 self.log.error("For unsecured HTTP access to MathJax use config:")
368 378 self.log.error("NotebookApp.mathjax_url='http://cdn.mathjax.org/mathjax/latest/MathJax.js'")
369 379 return u"https://%s/mathjax/latest/MathJax.js" % hostname
370 380
371 381 def _mathjax_url_changed(self, name, old, new):
372 382 if new and not self.enable_mathjax:
373 383 # enable_mathjax=False overrides mathjax_url
374 384 self.mathjax_url = u''
375 385 else:
376 386 self.log.info("Using MathJax: %s", new)
377 387
378 388 def parse_command_line(self, argv=None):
379 389 super(NotebookApp, self).parse_command_line(argv)
380 390 if argv is None:
381 391 argv = sys.argv[1:]
382 392
383 393 # Scrub frontend-specific flags
384 394 self.kernel_argv = swallow_argv(argv, notebook_aliases, notebook_flags)
385 395 # Kernel should inherit default config file from frontend
386 396 self.kernel_argv.append("--KernelApp.parent_appname='%s'"%self.name)
387 397
388 398 def init_configurables(self):
389 399 # force Session default to be secure
390 400 default_secure(self.config)
391 401 # Create a KernelManager and start a kernel.
392 402 self.kernel_manager = MappingKernelManager(
393 403 config=self.config, log=self.log, kernel_argv=self.kernel_argv,
394 404 connection_dir = self.profile_dir.security_dir,
395 405 )
396 406 self.notebook_manager = NotebookManager(config=self.config, log=self.log)
397 407 self.notebook_manager.list_notebooks()
408 self.cluster_manager = ClusterManager(config=self.config, log=self.log)
398 409
399 410 def init_logging(self):
400 411 super(NotebookApp, self).init_logging()
401 412         # This prevents double log messages because tornado uses a root logger that
402 413         # self.log is a child of. The logging module dispatches log messages to a logger
403 414         # and all of its ancestors until propagate is set to False.
404 415 self.log.propagate = False
405 416
406 417 def init_webapp(self):
407 418 """initialize tornado webapp and httpserver"""
408 419 self.web_app = NotebookWebApplication(
409 self, self.kernel_manager, self.notebook_manager, self.log,
420 self, self.kernel_manager, self.notebook_manager,
421 self.cluster_manager, self.log,
410 422 self.base_project_url, self.webapp_settings
411 423 )
412 424 if self.certfile:
413 425 ssl_options = dict(certfile=self.certfile)
414 426 if self.keyfile:
415 427 ssl_options['keyfile'] = self.keyfile
416 428 else:
417 429 ssl_options = None
418 430 self.web_app.password = self.password
419 431 self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options)
420 432 if ssl_options is None and not self.ip and not (self.read_only and not self.password):
421 433 self.log.critical('WARNING: the notebook server is listening on all IP addresses '
422 434 'but not using any encryption or authentication. This is highly '
423 435 'insecure and not recommended.')
424 436
425 437 # Try random ports centered around the default.
426 438 from random import randint
427 439 n = 50 # Max number of attempts, keep reasonably large.
428 440 for port in range(self.port, self.port+5) + [self.port + randint(-2*n, 2*n) for i in range(n-5)]:
429 441 try:
430 442 self.http_server.listen(port, self.ip)
431 443 except socket.error, e:
432 444 if e.errno != errno.EADDRINUSE:
433 445 raise
434 446 self.log.info('The port %i is already in use, trying another random port.' % port)
435 447 else:
436 448 self.port = port
437 449 break
438 450
439 451 @catch_config_error
440 452 def initialize(self, argv=None):
441 453 super(NotebookApp, self).initialize(argv)
442 454 self.init_configurables()
443 455 self.init_webapp()
444 456
445 457 def cleanup_kernels(self):
446 458 """shutdown all kernels
447 459
448 460 The kernels will shutdown themselves when this process no longer exists,
449 461 but explicit shutdown allows the KernelManagers to cleanup the connection files.
450 462 """
451 463 self.log.info('Shutting down kernels')
452 464 km = self.kernel_manager
453 465 # copy list, since kill_kernel deletes keys
454 466 for kid in list(km.kernel_ids):
455 467 km.kill_kernel(kid)
456 468
457 469 def start(self):
458 470 ip = self.ip if self.ip else '[all ip addresses on your system]'
459 471 proto = 'https' if self.certfile else 'http'
460 472 info = self.log.info
461 473 info("The IPython Notebook is running at: %s://%s:%i%s" %
462 474 (proto, ip, self.port,self.base_project_url) )
463 475 info("Use Control-C to stop this server and shut down all kernels.")
464 476
465 477 if self.open_browser:
466 478 ip = self.ip or '127.0.0.1'
467 479 if self.browser:
468 480 browser = webbrowser.get(self.browser)
469 481 else:
470 482 browser = webbrowser.get()
471 483 b = lambda : browser.open("%s://%s:%i%s" % (proto, ip, self.port,
472 484 self.base_project_url),
473 485 new=2)
474 486 threading.Thread(target=b).start()
475 487 try:
476 488 ioloop.IOLoop.instance().start()
477 489 except KeyboardInterrupt:
478 490 info("Interrupted...")
479 491 finally:
480 492 self.cleanup_kernels()
481 493
482 494
483 495 #-----------------------------------------------------------------------------
484 496 # Main entry point
485 497 #-----------------------------------------------------------------------------
486 498
487 499 def launch_new_instance():
488 500 app = NotebookApp.instance()
489 501 app.initialize()
490 502 app.start()
491 503
@@ -1,1221 +1,1223 b''
1 1 # encoding: utf-8
2 2 """
3 3 Facilities for launching IPython processes asynchronously.
4 4
5 5 Authors:
6 6
7 7 * Brian Granger
8 8 * MinRK
9 9 """
10 10
11 11 #-----------------------------------------------------------------------------
12 12 # Copyright (C) 2008-2011 The IPython Development Team
13 13 #
14 14 # Distributed under the terms of the BSD License. The full license is in
15 15 # the file COPYING, distributed as part of this software.
16 16 #-----------------------------------------------------------------------------
17 17
18 18 #-----------------------------------------------------------------------------
19 19 # Imports
20 20 #-----------------------------------------------------------------------------
21 21
22 22 import copy
23 23 import logging
24 24 import os
25 25 import re
26 26 import stat
27 27 import time
28 28
29 29 # signal imports, handling various platforms, versions
30 30
31 31 from signal import SIGINT, SIGTERM
32 32 try:
33 33 from signal import SIGKILL
34 34 except ImportError:
35 35 # Windows
36 36 SIGKILL=SIGTERM
37 37
38 38 try:
39 39 # Windows >= 2.7, 3.2
40 40 from signal import CTRL_C_EVENT as SIGINT
41 41 except ImportError:
42 42 pass
43 43
44 44 from subprocess import Popen, PIPE, STDOUT
45 45 try:
46 46 from subprocess import check_output
47 47 except ImportError:
48 48 # pre-2.7, define check_output with Popen
49 49 def check_output(*args, **kwargs):
50 50 kwargs.update(dict(stdout=PIPE))
51 51 p = Popen(*args, **kwargs)
52 52 out,err = p.communicate()
53 53 return out
54 54
55 55 from zmq.eventloop import ioloop
56 56
57 57 from IPython.config.application import Application
58 58 from IPython.config.configurable import LoggingConfigurable
59 59 from IPython.utils.text import EvalFormatter
60 60 from IPython.utils.traitlets import (
61 61 Any, Integer, CFloat, List, Unicode, Dict, Instance, HasTraits,
62 62 )
63 63 from IPython.utils.path import get_ipython_module_path
64 64 from IPython.utils.process import find_cmd, pycmd2argv, FindCmdError
65 65
66 66 from .win32support import forward_read_events
67 67
68 68 from .winhpcjob import IPControllerTask, IPEngineTask, IPControllerJob, IPEngineSetJob
69 69
70 70 WINDOWS = os.name == 'nt'
71 71
72 72 #-----------------------------------------------------------------------------
73 73 # Paths to the kernel apps
74 74 #-----------------------------------------------------------------------------
75 75
76 76
77 77 ipcluster_cmd_argv = pycmd2argv(get_ipython_module_path(
78 78 'IPython.parallel.apps.ipclusterapp'
79 79 ))
80 80
81 81 ipengine_cmd_argv = pycmd2argv(get_ipython_module_path(
82 82 'IPython.parallel.apps.ipengineapp'
83 83 ))
84 84
85 85 ipcontroller_cmd_argv = pycmd2argv(get_ipython_module_path(
86 86 'IPython.parallel.apps.ipcontrollerapp'
87 87 ))
88 88
89 89 #-----------------------------------------------------------------------------
90 90 # Base launchers and errors
91 91 #-----------------------------------------------------------------------------
92 92
93 93
94 94 class LauncherError(Exception):
95 95 pass
96 96
97 97
98 98 class ProcessStateError(LauncherError):
99 99 pass
100 100
101 101
102 102 class UnknownStatus(LauncherError):
103 103 pass
104 104
105 105
106 106 class BaseLauncher(LoggingConfigurable):
107 107 """An asbtraction for starting, stopping and signaling a process."""
108 108
109 109 # In all of the launchers, the work_dir is where child processes will be
110 110     # run. This will usually be the profile_dir, but may not be. Any work_dir
111 111 # passed into the __init__ method will override the config value.
112 112 # This should not be used to set the work_dir for the actual engine
113 113 # and controller. Instead, use their own config files or the
114 114 # controller_args, engine_args attributes of the launchers to add
115 115 # the work_dir option.
116 116 work_dir = Unicode(u'.')
117 117 loop = Instance('zmq.eventloop.ioloop.IOLoop')
118 118
119 119 start_data = Any()
120 120 stop_data = Any()
121 121
122 122 def _loop_default(self):
123 123 return ioloop.IOLoop.instance()
124 124
125 125 def __init__(self, work_dir=u'.', config=None, **kwargs):
126 126 super(BaseLauncher, self).__init__(work_dir=work_dir, config=config, **kwargs)
127 127 self.state = 'before' # can be before, running, after
128 128 self.stop_callbacks = []
129 129 self.start_data = None
130 130 self.stop_data = None
131 131
132 132 @property
133 133 def args(self):
134 134 """A list of cmd and args that will be used to start the process.
135 135
136 136 This is what is passed to :func:`spawnProcess` and the first element
137 137 will be the process name.
138 138 """
139 139 return self.find_args()
140 140
141 141 def find_args(self):
142 142 """The ``.args`` property calls this to find the args list.
143 143
144 144         Subclasses should implement this to construct the cmd and args.
145 145 """
146 146 raise NotImplementedError('find_args must be implemented in a subclass')
147 147
148 148 @property
149 149 def arg_str(self):
150 150 """The string form of the program arguments."""
151 151 return ' '.join(self.args)
152 152
153 153 @property
154 154 def running(self):
155 155 """Am I running."""
156 156 if self.state == 'running':
157 157 return True
158 158 else:
159 159 return False
160 160
161 161 def start(self):
162 162 """Start the process."""
163 163 raise NotImplementedError('start must be implemented in a subclass')
164 164
165 165 def stop(self):
166 166 """Stop the process and notify observers of stopping.
167 167
168 168 This method will return None immediately.
169 169 To observe the actual process stopping, see :meth:`on_stop`.
170 170 """
171 171 raise NotImplementedError('stop must be implemented in a subclass')
172 172
173 173 def on_stop(self, f):
174 174 """Register a callback to be called with this Launcher's stop_data
175 175 when the process actually finishes.
176 176 """
177 177 if self.state=='after':
178 178 return f(self.stop_data)
179 179 else:
180 180 self.stop_callbacks.append(f)
181 181
182 182 def notify_start(self, data):
183 183 """Call this to trigger startup actions.
184 184
185 185 This logs the process startup and sets the state to 'running'. It is
186 186 a pass-through so it can be used as a callback.
187 187 """
188 188
189 189 self.log.debug('Process %r started: %r', self.args[0], data)
190 190 self.start_data = data
191 191 self.state = 'running'
192 192 return data
193 193
194 194 def notify_stop(self, data):
195 195 """Call this to trigger process stop actions.
196 196
197 197 This logs the process stopping and sets the state to 'after'. Call
198 198 this to trigger callbacks registered via :meth:`on_stop`."""
199 199
200 200 self.log.debug('Process %r stopped: %r', self.args[0], data)
201 201 self.stop_data = data
202 202 self.state = 'after'
203 203 for i in range(len(self.stop_callbacks)):
204 204 d = self.stop_callbacks.pop()
205 205 d(data)
206 206 return data
207 207
208 208 def signal(self, sig):
209 209 """Signal the process.
210 210
211 211 Parameters
212 212 ----------
213 213 sig : str or int
214 214 'KILL', 'INT', etc., or any signal number
215 215 """
216 216 raise NotImplementedError('signal must be implemented in a subclass')
217 217
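A minimal sketch of the contract described above: find_args/start/stop are the abstract pieces, while notify_start/notify_stop drive the state machine and the on_stop callbacks. The EchoLauncher class below is purely illustrative and not part of this module:

class EchoLauncher(BaseLauncher):
    """Toy launcher used only to illustrate the BaseLauncher API."""

    def find_args(self):
        # backs the .args / .arg_str properties; args[0] is the process name
        return ['echo', 'hello']

    def start(self):
        # a real subclass would spawn self.args here
        return self.notify_start(dict(pid=None))

    def stop(self):
        # notify_stop flips state to 'after' and fires on_stop callbacks
        return self.notify_stop(dict(exit_code=0))

def report(stop_data):
    pass  # receives whatever was passed to notify_stop

el = EchoLauncher()
el.on_stop(report)
el.start()   # state becomes 'running', el.running is True
el.stop()    # state becomes 'after', report() has been called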
218 218 class ClusterAppMixin(HasTraits):
219 219 """MixIn for cluster args as traits"""
220 220 cluster_args = List([])
221 221 profile_dir=Unicode('')
222 222 cluster_id=Unicode('')
223 223 def _profile_dir_changed(self, name, old, new):
224 224 self.cluster_args = []
225 225 if self.profile_dir:
226 226 self.cluster_args.extend(['--profile-dir', self.profile_dir])
227 227 if self.cluster_id:
228 228 self.cluster_args.extend(['--cluster-id', self.cluster_id])
229 229 _cluster_id_changed = _profile_dir_changed
230 230
231 231 class ControllerMixin(ClusterAppMixin):
232 232 controller_cmd = List(ipcontroller_cmd_argv, config=True,
233 233 help="""Popen command to launch ipcontroller.""")
234 234 # Command line arguments to ipcontroller.
235 235 controller_args = List(['--log-to-file','--log-level=%i' % logging.INFO], config=True,
236 236 help="""command-line args to pass to ipcontroller""")
237 237
238 238 class EngineMixin(ClusterAppMixin):
239 239 engine_cmd = List(ipengine_cmd_argv, config=True,
240 240 help="""command to launch the Engine.""")
241 241 # Command line arguments for ipengine.
242 242 engine_args = List(['--log-to-file','--log-level=%i' % logging.INFO], config=True,
243 243 help="command-line arguments to pass to ipengine"
244 244 )
245 245
246 246 #-----------------------------------------------------------------------------
247 247 # Local process launchers
248 248 #-----------------------------------------------------------------------------
249 249
250 250
251 251 class LocalProcessLauncher(BaseLauncher):
252 252 """Start and stop an external process in an asynchronous manner.
253 253
254 254 This will launch the external process with a working directory of
255 255 ``self.work_dir``.
256 256 """
257 257
258 258     # This is used to construct self.args, which is passed to
259 259 # spawnProcess.
260 260 cmd_and_args = List([])
261 261 poll_frequency = Integer(100) # in ms
262 262
263 263 def __init__(self, work_dir=u'.', config=None, **kwargs):
264 264 super(LocalProcessLauncher, self).__init__(
265 265 work_dir=work_dir, config=config, **kwargs
266 266 )
267 267 self.process = None
268 268 self.poller = None
269 269
270 270 def find_args(self):
271 271 return self.cmd_and_args
272 272
273 273 def start(self):
274 274 self.log.debug("Starting %s: %r", self.__class__.__name__, self.args)
275 275 if self.state == 'before':
276 276 self.process = Popen(self.args,
277 277 stdout=PIPE,stderr=PIPE,stdin=PIPE,
278 278 env=os.environ,
279 279 cwd=self.work_dir
280 280 )
281 281 if WINDOWS:
282 282 self.stdout = forward_read_events(self.process.stdout)
283 283 self.stderr = forward_read_events(self.process.stderr)
284 284 else:
285 285 self.stdout = self.process.stdout.fileno()
286 286 self.stderr = self.process.stderr.fileno()
287 287 self.loop.add_handler(self.stdout, self.handle_stdout, self.loop.READ)
288 288 self.loop.add_handler(self.stderr, self.handle_stderr, self.loop.READ)
289 289 self.poller = ioloop.PeriodicCallback(self.poll, self.poll_frequency, self.loop)
290 290 self.poller.start()
291 291 self.notify_start(self.process.pid)
292 292 else:
293 293 s = 'The process was already started and has state: %r' % self.state
294 294 raise ProcessStateError(s)
295 295
296 296 def stop(self):
297 297 return self.interrupt_then_kill()
298 298
299 299 def signal(self, sig):
300 300 if self.state == 'running':
301 301 if WINDOWS and sig != SIGINT:
302 302 # use Windows tree-kill for better child cleanup
303 303 check_output(['taskkill', '-pid', str(self.process.pid), '-t', '-f'])
304 304 else:
305 305 self.process.send_signal(sig)
306 306
307 307 def interrupt_then_kill(self, delay=2.0):
308 308 """Send INT, wait a delay and then send KILL."""
309 309 try:
310 310 self.signal(SIGINT)
311 311 except Exception:
312 312 self.log.debug("interrupt failed")
313 313 pass
314 314 self.killer = ioloop.DelayedCallback(lambda : self.signal(SIGKILL), delay*1000, self.loop)
315 315 self.killer.start()
316 316
317 317 # callbacks, etc:
318 318
319 319 def handle_stdout(self, fd, events):
320 320 if WINDOWS:
321 321 line = self.stdout.recv()
322 322 else:
323 323 line = self.process.stdout.readline()
324 324 # a stopped process will be readable but return empty strings
325 325 if line:
326 326 self.log.debug(line[:-1])
327 327 else:
328 328 self.poll()
329 329
330 330 def handle_stderr(self, fd, events):
331 331 if WINDOWS:
332 332 line = self.stderr.recv()
333 333 else:
334 334 line = self.process.stderr.readline()
335 335 # a stopped process will be readable but return empty strings
336 336 if line:
337 337 self.log.debug(line[:-1])
338 338 else:
339 339 self.poll()
340 340
341 341 def poll(self):
342 342 status = self.process.poll()
343 343 if status is not None:
344 344 self.poller.stop()
345 345 self.loop.remove_handler(self.stdout)
346 346 self.loop.remove_handler(self.stderr)
347 347 self.notify_stop(dict(exit_code=status, pid=self.process.pid))
348 348 return status
349 349
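A usage sketch for LocalProcessLauncher under the zmq IOLoop; the command line and callback are arbitrary examples, not part of the module. Set cmd_and_args, start the launcher, and let handle_stdout/handle_stderr plus the periodic poller do the rest:

# Illustrative only; any command line can be used as cmd_and_args.
from zmq.eventloop import ioloop

loop = ioloop.IOLoop.instance()
lp = LocalProcessLauncher(work_dir=u'.')
lp.cmd_and_args = ['python', '-c', 'print "hello"']

def on_exit(stop_data):
    # stop_data is the dict(exit_code=..., pid=...) built in poll()
    loop.stop()

lp.on_stop(on_exit)
lp.start()    # spawns the process and wires stdout/stderr into the loop
loop.start()  # output is logged at debug level until the process exits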
350 350 class LocalControllerLauncher(LocalProcessLauncher, ControllerMixin):
351 351 """Launch a controller as a regular external process."""
352 352
353 353 def find_args(self):
354 354 return self.controller_cmd + self.cluster_args + self.controller_args
355 355
356 356 def start(self):
357 357 """Start the controller by profile_dir."""
358 358 return super(LocalControllerLauncher, self).start()
359 359
360 360
361 361 class LocalEngineLauncher(LocalProcessLauncher, EngineMixin):
362 362 """Launch a single engine as a regular externall process."""
363 363
364 364 def find_args(self):
365 365 return self.engine_cmd + self.cluster_args + self.engine_args
366 366
367 367
368 368 class LocalEngineSetLauncher(LocalEngineLauncher):
369 369 """Launch a set of engines as regular external processes."""
370 370
371 371 delay = CFloat(0.1, config=True,
372 372 help="""delay (in seconds) between starting each engine after the first.
373 373 This can help force the engines to get their ids in order, or limit
374 374 process flood when starting many engines."""
375 375 )
376 376
377 377 # launcher class
378 378 launcher_class = LocalEngineLauncher
379 379
380 380 launchers = Dict()
381 381 stop_data = Dict()
382 382
383 383 def __init__(self, work_dir=u'.', config=None, **kwargs):
384 384 super(LocalEngineSetLauncher, self).__init__(
385 385 work_dir=work_dir, config=config, **kwargs
386 386 )
387 387 self.stop_data = {}
388 388
389 389 def start(self, n):
390 390 """Start n engines by profile or profile_dir."""
391 391 dlist = []
392 392 for i in range(n):
393 393 if i > 0:
394 394 time.sleep(self.delay)
395 395 el = self.launcher_class(work_dir=self.work_dir, config=self.config, log=self.log,
396 396 profile_dir=self.profile_dir, cluster_id=self.cluster_id,
397 397 )
398 398
399 399 # Copy the engine args over to each engine launcher.
400 400 el.engine_cmd = copy.deepcopy(self.engine_cmd)
401 401 el.engine_args = copy.deepcopy(self.engine_args)
402 402 el.on_stop(self._notice_engine_stopped)
403 403 d = el.start()
404 404 self.launchers[i] = el
405 405 dlist.append(d)
406 406 self.notify_start(dlist)
407 407 return dlist
408 408
409 409 def find_args(self):
410 410 return ['engine set']
411 411
412 412 def signal(self, sig):
413 413 dlist = []
414 414 for el in self.launchers.itervalues():
415 415 d = el.signal(sig)
416 416 dlist.append(d)
417 417 return dlist
418 418
419 419 def interrupt_then_kill(self, delay=1.0):
420 420 dlist = []
421 421 for el in self.launchers.itervalues():
422 422 d = el.interrupt_then_kill(delay)
423 423 dlist.append(d)
424 424 return dlist
425 425
426 426 def stop(self):
427 427 return self.interrupt_then_kill()
428 428
429 429 def _notice_engine_stopped(self, data):
430 430 pid = data['pid']
431 431 for idx,el in self.launchers.iteritems():
432 432 if el.process.pid == pid:
433 433 break
434 434 self.launchers.pop(idx)
435 435 self.stop_data[idx] = data
436 436 if not self.launchers:
437 437 self.notify_stop(self.stop_data)
438 438
439 439
440 440 #-----------------------------------------------------------------------------
441 441 # MPI launchers
442 442 #-----------------------------------------------------------------------------
443 443
444 444
445 445 class MPILauncher(LocalProcessLauncher):
446 446 """Launch an external process using mpiexec."""
447 447
448 448 mpi_cmd = List(['mpiexec'], config=True,
449 449 help="The mpiexec command to use in starting the process."
450 450 )
451 451 mpi_args = List([], config=True,
452 452 help="The command line arguments to pass to mpiexec."
453 453 )
454 454 program = List(['date'],
455 455 help="The program to start via mpiexec.")
456 456 program_args = List([],
457 457 help="The command line argument to the program."
458 458 )
459 459 n = Integer(1)
460 460
461 461 def __init__(self, *args, **kwargs):
462 462 # deprecation for old MPIExec names:
463 463 config = kwargs.get('config', {})
464 464 for oldname in ('MPIExecLauncher', 'MPIExecControllerLauncher', 'MPIExecEngineSetLauncher'):
465 465 deprecated = config.get(oldname)
466 466 if deprecated:
467 467 newname = oldname.replace('MPIExec', 'MPI')
468 468 config[newname].update(deprecated)
469 469 self.log.warn("WARNING: %s name has been deprecated, use %s", oldname, newname)
470 470
471 471 super(MPILauncher, self).__init__(*args, **kwargs)
472 472
473 473 def find_args(self):
474 474 """Build self.args using all the fields."""
475 475 return self.mpi_cmd + ['-n', str(self.n)] + self.mpi_args + \
476 476 self.program + self.program_args
477 477
478 478 def start(self, n):
479 479 """Start n instances of the program using mpiexec."""
480 480 self.n = n
481 481 return super(MPILauncher, self).start()
482 482
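For orientation, find_args() above simply concatenates the configured pieces; a hedged sketch of the configuration and of the approximate command line an engine set would produce (the hostfile and engine count are invented examples):

# Hypothetical ipcluster_config.py fragment; values are examples only.
c = get_config()
c.MPILauncher.mpi_cmd  = ['mpiexec']
c.MPILauncher.mpi_args = ['--hostfile', 'hosts.txt']

# MPIEngineSetLauncher.start(4) then runs, roughly:
#   mpiexec -n 4 --hostfile hosts.txt <ipengine launch command> \
#       --profile-dir "<profile_dir>" --log-to-file --log-level=20
# where <ipengine launch command> is the interpreter invocation built from
# ipengine_cmd_argv at the top of this file.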
483 483
484 484 class MPIControllerLauncher(MPILauncher, ControllerMixin):
485 485 """Launch a controller using mpiexec."""
486 486
487 487 # alias back to *non-configurable* program[_args] for use in find_args()
488 488 # this way all Controller/EngineSetLaunchers have the same form, rather
489 489 # than *some* having `program_args` and others `controller_args`
490 490 @property
491 491 def program(self):
492 492 return self.controller_cmd
493 493
494 494 @property
495 495 def program_args(self):
496 496 return self.cluster_args + self.controller_args
497 497
498 498 def start(self):
499 499 """Start the controller by profile_dir."""
500 500 return super(MPIControllerLauncher, self).start(1)
501 501
502 502
503 503 class MPIEngineSetLauncher(MPILauncher, EngineMixin):
504 504 """Launch engines using mpiexec"""
505 505
506 506 # alias back to *non-configurable* program[_args] for use in find_args()
507 507 # this way all Controller/EngineSetLaunchers have the same form, rather
508 508 # than *some* having `program_args` and others `controller_args`
509 509 @property
510 510 def program(self):
511 511 return self.engine_cmd
512 512
513 513 @property
514 514 def program_args(self):
515 515 return self.cluster_args + self.engine_args
516 516
517 517 def start(self, n):
518 518 """Start n engines by profile or profile_dir."""
519 519 self.n = n
520 520 return super(MPIEngineSetLauncher, self).start(n)
521 521
522 522 # deprecated MPIExec names
523 523 class DeprecatedMPILauncher(object):
524 524 def warn(self):
525 525 oldname = self.__class__.__name__
526 526 newname = oldname.replace('MPIExec', 'MPI')
527 527 self.log.warn("WARNING: %s name is deprecated, use %s", oldname, newname)
528 528
529 529 class MPIExecLauncher(MPILauncher, DeprecatedMPILauncher):
530 530 """Deprecated, use MPILauncher"""
531 531 def __init__(self, *args, **kwargs):
532 532 super(MPIExecLauncher, self).__init__(*args, **kwargs)
533 533 self.warn()
534 534
535 535 class MPIExecControllerLauncher(MPIControllerLauncher, DeprecatedMPILauncher):
536 536 """Deprecated, use MPIControllerLauncher"""
537 537 def __init__(self, *args, **kwargs):
538 538 super(MPIExecControllerLauncher, self).__init__(*args, **kwargs)
539 539 self.warn()
540 540
541 541 class MPIExecEngineSetLauncher(MPIEngineSetLauncher, DeprecatedMPILauncher):
542 542 """Deprecated, use MPIEngineSetLauncher"""
543 543 def __init__(self, *args, **kwargs):
544 544 super(MPIExecEngineSetLauncher, self).__init__(*args, **kwargs)
545 545 self.warn()
546 546
547 547
548 548 #-----------------------------------------------------------------------------
549 549 # SSH launchers
550 550 #-----------------------------------------------------------------------------
551 551
552 552 # TODO: Get SSH Launcher back to level of sshx in 0.10.2
553 553
554 554 class SSHLauncher(LocalProcessLauncher):
555 555 """A minimal launcher for ssh.
556 556
557 557 To be useful this will probably have to be extended to use the ``sshx``
558 558 idea for environment variables. There could be other things this needs
559 559 as well.
560 560 """
561 561
562 562 ssh_cmd = List(['ssh'], config=True,
563 563 help="command for starting ssh")
564 564 ssh_args = List(['-tt'], config=True,
565 565 help="args to pass to ssh")
566 566 program = List(['date'],
567 567 help="Program to launch via ssh")
568 568 program_args = List([],
569 569 help="args to pass to remote program")
570 570 hostname = Unicode('', config=True,
571 571 help="hostname on which to launch the program")
572 572 user = Unicode('', config=True,
573 573 help="username for ssh")
574 574 location = Unicode('', config=True,
575 575 help="user@hostname location for ssh in one setting")
576 576
577 577 def _hostname_changed(self, name, old, new):
578 578 if self.user:
579 579 self.location = u'%s@%s' % (self.user, new)
580 580 else:
581 581 self.location = new
582 582
583 583 def _user_changed(self, name, old, new):
584 584 self.location = u'%s@%s' % (new, self.hostname)
585 585
586 586 def find_args(self):
587 587 return self.ssh_cmd + self.ssh_args + [self.location] + \
588 588 self.program + self.program_args
589 589
590 590 def start(self, hostname=None, user=None):
591 591 if hostname is not None:
592 592 self.hostname = hostname
593 593 if user is not None:
594 594 self.user = user
595 595
596 596 return super(SSHLauncher, self).start()
597 597
598 598 def signal(self, sig):
599 599 if self.state == 'running':
600 600 # send escaped ssh connection-closer
601 601 self.process.stdin.write('~.')
602 602 self.process.stdin.flush()
603 603
604 604
605 605
606 606 class SSHControllerLauncher(SSHLauncher, ControllerMixin):
607 607
608 608 # alias back to *non-configurable* program[_args] for use in find_args()
609 609 # this way all Controller/EngineSetLaunchers have the same form, rather
610 610 # than *some* having `program_args` and others `controller_args`
611 611 @property
612 612 def program(self):
613 613 return self.controller_cmd
614 614
615 615 @property
616 616 def program_args(self):
617 617 return self.cluster_args + self.controller_args
618 618
619 619
620 620 class SSHEngineLauncher(SSHLauncher, EngineMixin):
621 621
622 622 # alias back to *non-configurable* program[_args] for use in find_args()
623 623 # this way all Controller/EngineSetLaunchers have the same form, rather
624 624 # than *some* having `program_args` and others `controller_args`
625 625 @property
626 626 def program(self):
627 627 return self.engine_cmd
628 628
629 629 @property
630 630 def program_args(self):
631 631 return self.cluster_args + self.engine_args
632 632
633 633
634 634 class SSHEngineSetLauncher(LocalEngineSetLauncher):
635 635 launcher_class = SSHEngineLauncher
636 636 engines = Dict(config=True,
637 637 help="""dict of engines to launch. This is a dict by hostname of ints,
638 638 corresponding to the number of engines to start on that host.""")
639 639
640 640 @property
641 641 def engine_count(self):
642 642 """determine engine count from `engines` dict"""
643 643 count = 0
644 644 for n in self.engines.itervalues():
645 645 if isinstance(n, (tuple,list)):
646 646 n,args = n
647 647 count += n
648 648 return count
649 649
650 650 def start(self, n):
651 651 """Start engines by profile or profile_dir.
652 652 `n` is ignored, and the `engines` config property is used instead.
653 653 """
654 654
655 655 dlist = []
656 656 for host, n in self.engines.iteritems():
657 657 if isinstance(n, (tuple, list)):
658 658 n, args = n
659 659 else:
660 660 args = copy.deepcopy(self.engine_args)
661 661
662 662 if '@' in host:
663 663 user,host = host.split('@',1)
664 664 else:
665 665 user=None
666 666 for i in range(n):
667 667 if i > 0:
668 668 time.sleep(self.delay)
669 669 el = self.launcher_class(work_dir=self.work_dir, config=self.config, log=self.log,
670 670 profile_dir=self.profile_dir, cluster_id=self.cluster_id,
671 671 )
672 672
673 673 # Copy the engine args over to each engine launcher.
674 674 el.engine_cmd = self.engine_cmd
675 675 el.engine_args = args
676 676 el.on_stop(self._notice_engine_stopped)
677 677 d = el.start(user=user, hostname=host)
678 678 self.launchers[ "%s/%i" % (host,i) ] = el
679 679 dlist.append(d)
680 680 self.notify_start(dlist)
681 681 return dlist
682 682
683 683
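As the start() loop above shows, the engines dict maps a host (optionally user@host) to either a plain engine count or a (count, args) tuple; a hedged config sketch with placeholder hostnames:

# Hypothetical ipcluster_config.py fragment; hostnames are placeholders.
c = get_config()
c.SSHEngineSetLauncher.engines = {
    'node1'       : 2,                         # two engines, default args
    'alice@node2' : (4, ['--log-level=20']),   # four engines, custom args
}
# engine_count would report 6 here; start(n) ignores n in favor of this dict.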
684 684
685 685 #-----------------------------------------------------------------------------
686 686 # Windows HPC Server 2008 scheduler launchers
687 687 #-----------------------------------------------------------------------------
688 688
689 689
690 690 # This is only used on Windows.
691 691 def find_job_cmd():
692 692 if WINDOWS:
693 693 try:
694 694 return find_cmd('job')
695 695 except (FindCmdError, ImportError):
696 696 # ImportError will be raised if win32api is not installed
697 697 return 'job'
698 698 else:
699 699 return 'job'
700 700
701 701
702 702 class WindowsHPCLauncher(BaseLauncher):
703 703
704 704 job_id_regexp = Unicode(r'\d+', config=True,
705 705 help="""A regular expression used to get the job id from the output of the
706 706 submit_command. """
707 707 )
708 708 job_file_name = Unicode(u'ipython_job.xml', config=True,
709 709 help="The filename of the instantiated job script.")
710 710 # The full path to the instantiated job script. This gets made dynamically
711 711 # by combining the work_dir with the job_file_name.
712 712 job_file = Unicode(u'')
713 713 scheduler = Unicode('', config=True,
714 714 help="The hostname of the scheduler to submit the job to.")
715 715 job_cmd = Unicode(find_job_cmd(), config=True,
716 716 help="The command for submitting jobs.")
717 717
718 718 def __init__(self, work_dir=u'.', config=None, **kwargs):
719 719 super(WindowsHPCLauncher, self).__init__(
720 720 work_dir=work_dir, config=config, **kwargs
721 721 )
722 722
723 723 @property
724 724 def job_file(self):
725 725 return os.path.join(self.work_dir, self.job_file_name)
726 726
727 727 def write_job_file(self, n):
728 728 raise NotImplementedError("Implement write_job_file in a subclass.")
729 729
730 730 def find_args(self):
731 731 return [u'job.exe']
732 732
733 733 def parse_job_id(self, output):
734 734 """Take the output of the submit command and return the job id."""
735 735 m = re.search(self.job_id_regexp, output)
736 736 if m is not None:
737 737 job_id = m.group()
738 738 else:
739 739 raise LauncherError("Job id couldn't be determined: %s" % output)
740 740 self.job_id = job_id
741 741 self.log.info('Job started with id: %r', job_id)
742 742 return job_id
743 743
744 744 def start(self, n):
745 745 """Start n copies of the process using the Win HPC job scheduler."""
746 746 self.write_job_file(n)
747 747 args = [
748 748 'submit',
749 749 '/jobfile:%s' % self.job_file,
750 750 '/scheduler:%s' % self.scheduler
751 751 ]
752 752 self.log.debug("Starting Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))
753 753
754 754 output = check_output([self.job_cmd]+args,
755 755 env=os.environ,
756 756 cwd=self.work_dir,
757 757 stderr=STDOUT
758 758 )
759 759 job_id = self.parse_job_id(output)
760 760 self.notify_start(job_id)
761 761 return job_id
762 762
763 763 def stop(self):
764 764 args = [
765 765 'cancel',
766 766 self.job_id,
767 767 '/scheduler:%s' % self.scheduler
768 768 ]
769 769 self.log.info("Stopping Win HPC Job: %s" % (self.job_cmd + ' ' + ' '.join(args),))
770 770 try:
771 771 output = check_output([self.job_cmd]+args,
772 772 env=os.environ,
773 773 cwd=self.work_dir,
774 774 stderr=STDOUT
775 775 )
776 776 except:
777 777             output = 'The job already appears to be stopped: %r' % self.job_id
778 778 self.notify_stop(dict(job_id=self.job_id, output=output)) # Pass the output of the kill cmd
779 779 return output
780 780
781 781
782 782 class WindowsHPCControllerLauncher(WindowsHPCLauncher, ClusterAppMixin):
783 783
784 784 job_file_name = Unicode(u'ipcontroller_job.xml', config=True,
785 785 help="WinHPC xml job file.")
786 786 controller_args = List([], config=False,
787 787 help="extra args to pass to ipcontroller")
788 788
789 789 def write_job_file(self, n):
790 790 job = IPControllerJob(config=self.config)
791 791
792 792 t = IPControllerTask(config=self.config)
793 793         # The task's work directory is *not* the actual work directory of
794 794 # the controller. It is used as the base path for the stdout/stderr
795 795 # files that the scheduler redirects to.
796 796 t.work_directory = self.profile_dir
797 797         # Add the profile_dir and cluster_id arguments.
798 798 t.controller_args.extend(self.cluster_args)
799 799 t.controller_args.extend(self.controller_args)
800 800 job.add_task(t)
801 801
802 802 self.log.debug("Writing job description file: %s", self.job_file)
803 803 job.write(self.job_file)
804 804
805 805 @property
806 806 def job_file(self):
807 807 return os.path.join(self.profile_dir, self.job_file_name)
808 808
809 809 def start(self):
810 810 """Start the controller by profile_dir."""
811 811 return super(WindowsHPCControllerLauncher, self).start(1)
812 812
813 813
814 814 class WindowsHPCEngineSetLauncher(WindowsHPCLauncher, ClusterAppMixin):
815 815
816 816 job_file_name = Unicode(u'ipengineset_job.xml', config=True,
817 817 help="jobfile for ipengines job")
818 818 engine_args = List([], config=False,
819 819 help="extra args to pas to ipengine")
820 820
821 821 def write_job_file(self, n):
822 822 job = IPEngineSetJob(config=self.config)
823 823
824 824 for i in range(n):
825 825 t = IPEngineTask(config=self.config)
826 826             # The task's work directory is *not* the actual work directory of
827 827 # the engine. It is used as the base path for the stdout/stderr
828 828 # files that the scheduler redirects to.
829 829 t.work_directory = self.profile_dir
830 830             # Add the profile_dir and cluster_id arguments.
831 831 t.engine_args.extend(self.cluster_args)
832 832 t.engine_args.extend(self.engine_args)
833 833 job.add_task(t)
834 834
835 835 self.log.debug("Writing job description file: %s", self.job_file)
836 836 job.write(self.job_file)
837 837
838 838 @property
839 839 def job_file(self):
840 840 return os.path.join(self.profile_dir, self.job_file_name)
841 841
842 842 def start(self, n):
843 843 """Start the controller by profile_dir."""
844 844 return super(WindowsHPCEngineSetLauncher, self).start(n)
845 845
846 846
847 847 #-----------------------------------------------------------------------------
848 848 # Batch (PBS) system launchers
849 849 #-----------------------------------------------------------------------------
850 850
851 851 class BatchClusterAppMixin(ClusterAppMixin):
852 852 """ClusterApp mixin that updates the self.context dict, rather than cl-args."""
853 853 def _profile_dir_changed(self, name, old, new):
854 854 self.context[name] = new
855 855 _cluster_id_changed = _profile_dir_changed
856 856
857 857 def _profile_dir_default(self):
858 858 self.context['profile_dir'] = ''
859 859 return ''
860 860 def _cluster_id_default(self):
861 861 self.context['cluster_id'] = ''
862 862 return ''
863 863
864 864
865 865 class BatchSystemLauncher(BaseLauncher):
866 866 """Launch an external process using a batch system.
867 867
868 868 This class is designed to work with UNIX batch systems like PBS, LSF,
869 869 GridEngine, etc. The overall model is that there are different commands
870 870 like qsub, qdel, etc. that handle the starting and stopping of the process.
871 871
872 872 This class also has the notion of a batch script. The ``batch_template``
873 873 attribute can be set to a string that is a template for the batch script.
874 874 This template is instantiated using string formatting. Thus the template can
875 875     use {n} for the number of instances. Subclasses can add additional variables
876 876 to the template dict.
877 877 """
878 878
879 879 # Subclasses must fill these in. See PBSEngineSet
880 880 submit_command = List([''], config=True,
881 881 help="The name of the command line program used to submit jobs.")
882 882 delete_command = List([''], config=True,
883 883 help="The name of the command line program used to delete jobs.")
884 884 job_id_regexp = Unicode('', config=True,
885 885 help="""A regular expression used to get the job id from the output of the
886 886 submit_command.""")
887 887 batch_template = Unicode('', config=True,
888 888 help="The string that is the batch script template itself.")
889 889 batch_template_file = Unicode(u'', config=True,
890 890 help="The file that contains the batch template.")
891 891 batch_file_name = Unicode(u'batch_script', config=True,
892 892 help="The filename of the instantiated batch script.")
893 893 queue = Unicode(u'', config=True,
894 894 help="The PBS Queue.")
895 895
896 896 def _queue_changed(self, name, old, new):
897 897 self.context[name] = new
898 898
899 899 n = Integer(1)
900 900 _n_changed = _queue_changed
901 901
902 902 # not configurable, override in subclasses
903 903 # PBS Job Array regex
904 904 job_array_regexp = Unicode('')
905 905 job_array_template = Unicode('')
906 906 # PBS Queue regex
907 907 queue_regexp = Unicode('')
908 908 queue_template = Unicode('')
909 909 # The default batch template, override in subclasses
910 910 default_template = Unicode('')
911 911 # The full path to the instantiated batch script.
912 912 batch_file = Unicode(u'')
913 913 # the format dict used with batch_template:
914 914 context = Dict()
915 915 def _context_default(self):
916 916 """load the default context with the default values for the basic keys
917 917
918 918 because the _trait_changed methods only load the context if they
919 919 are set to something other than the default value.
920 920 """
921 921 return dict(n=1, queue=u'', profile_dir=u'', cluster_id=u'')
922 922
923 923 # the Formatter instance for rendering the templates:
924 924 formatter = Instance(EvalFormatter, (), {})
925 925
926 926
927 927 def find_args(self):
928 928 return self.submit_command + [self.batch_file]
929 929
930 930 def __init__(self, work_dir=u'.', config=None, **kwargs):
931 931 super(BatchSystemLauncher, self).__init__(
932 932 work_dir=work_dir, config=config, **kwargs
933 933 )
934 934 self.batch_file = os.path.join(self.work_dir, self.batch_file_name)
935 935
936 936 def parse_job_id(self, output):
937 937 """Take the output of the submit command and return the job id."""
938 938 m = re.search(self.job_id_regexp, output)
939 939 if m is not None:
940 940 job_id = m.group()
941 941 else:
942 942 raise LauncherError("Job id couldn't be determined: %s" % output)
943 943 self.job_id = job_id
944 944 self.log.info('Job submitted with job id: %r', job_id)
945 945 return job_id
946 946
947 947 def write_batch_script(self, n):
948 948 """Instantiate and write the batch script to the work_dir."""
949 949 self.n = n
950 950 # first priority is batch_template if set
951 951 if self.batch_template_file and not self.batch_template:
952 952 # second priority is batch_template_file
953 953 with open(self.batch_template_file) as f:
954 954 self.batch_template = f.read()
955 955 if not self.batch_template:
956 956 # third (last) priority is default_template
957 957 self.batch_template = self.default_template
958 958
959 959         # add job array or queue lines to the batch template,
960 960         # but only if the template does not already contain them
961 961 regex = re.compile(self.job_array_regexp)
962 962 # print regex.search(self.batch_template)
963 963 if not regex.search(self.batch_template):
964 964 self.log.debug("adding job array settings to batch script")
965 965 firstline, rest = self.batch_template.split('\n',1)
966 966 self.batch_template = u'\n'.join([firstline, self.job_array_template, rest])
967 967
968 968 regex = re.compile(self.queue_regexp)
969 969 # print regex.search(self.batch_template)
970 970 if self.queue and not regex.search(self.batch_template):
971 971 self.log.debug("adding PBS queue settings to batch script")
972 972 firstline, rest = self.batch_template.split('\n',1)
973 973 self.batch_template = u'\n'.join([firstline, self.queue_template, rest])
974 974
975 975 script_as_string = self.formatter.format(self.batch_template, **self.context)
976 976 self.log.debug('Writing batch script: %s', self.batch_file)
977 977
978 978 with open(self.batch_file, 'w') as f:
979 979 f.write(script_as_string)
980 980 os.chmod(self.batch_file, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
981 981
982 982 def start(self, n):
983 983 """Start n copies of the process using a batch system."""
984 984 self.log.debug("Starting %s: %r", self.__class__.__name__, self.args)
985 985         # Here we save profile_dir in the context so it
986 986         # can be used in the batch script template as {profile_dir}
987 987 self.write_batch_script(n)
988 988 output = check_output(self.args, env=os.environ)
989 989
990 990 job_id = self.parse_job_id(output)
991 991 self.notify_start(job_id)
992 992 return job_id
993 993
994 994 def stop(self):
995 995 output = check_output(self.delete_command+[self.job_id], env=os.environ)
996 996 self.notify_stop(dict(job_id=self.job_id, output=output)) # Pass the output of the kill cmd
997 997 return output
998 998
999 999
1000 1000 class PBSLauncher(BatchSystemLauncher):
1001 1001 """A BatchSystemLauncher subclass for PBS."""
1002 1002
1003 1003 submit_command = List(['qsub'], config=True,
1004 1004 help="The PBS submit command ['qsub']")
1005 1005 delete_command = List(['qdel'], config=True,
1006 1006 help="The PBS delete command ['qsub']")
1007 1007 job_id_regexp = Unicode(r'\d+', config=True,
1008 1008 help="Regular expresion for identifying the job ID [r'\d+']")
1009 1009
1010 1010 batch_file = Unicode(u'')
1011 1011 job_array_regexp = Unicode('#PBS\W+-t\W+[\w\d\-\$]+')
1012 1012 job_array_template = Unicode('#PBS -t 1-{n}')
1013 1013 queue_regexp = Unicode('#PBS\W+-q\W+\$?\w+')
1014 1014 queue_template = Unicode('#PBS -q {queue}')
1015 1015
1016 1016
1017 1017 class PBSControllerLauncher(PBSLauncher, BatchClusterAppMixin):
1018 1018 """Launch a controller using PBS."""
1019 1019
1020 1020 batch_file_name = Unicode(u'pbs_controller', config=True,
1021 1021 help="batch file name for the controller job.")
1022 1022 default_template= Unicode("""#!/bin/sh
1023 1023 #PBS -V
1024 1024 #PBS -N ipcontroller
1025 1025 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1026 1026 """%(' '.join(ipcontroller_cmd_argv)))
1027 1027
1028 1028
1029 1029 def start(self):
1030 1030 """Start the controller by profile or profile_dir."""
1031 1031 return super(PBSControllerLauncher, self).start(1)
1032 1032
1033 1033
1034 1034 class PBSEngineSetLauncher(PBSLauncher, BatchClusterAppMixin):
1035 1035 """Launch Engines using PBS"""
1036 1036 batch_file_name = Unicode(u'pbs_engines', config=True,
1037 1037 help="batch file name for the engine(s) job.")
1038 1038 default_template= Unicode(u"""#!/bin/sh
1039 1039 #PBS -V
1040 1040 #PBS -N ipengine
1041 1041 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1042 1042 """%(' '.join(ipengine_cmd_argv)))
1043 1043
1044 1044 def start(self, n):
1045 1045 """Start n engines by profile or profile_dir."""
1046 1046 return super(PBSEngineSetLauncher, self).start(n)
1047 1047
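When the default templates do not fit a site, batch_template (or batch_template_file) can be overridden using the context keys documented above; a hedged sketch of a custom engine template (the queue name and the walltime directive are site-specific examples):

# Hypothetical ipcluster_config.py fragment; adjust for your site.
c = get_config()
c.PBSEngineSetLauncher.queue = 'batch'
c.PBSEngineSetLauncher.batch_template = """#!/bin/sh
#PBS -V
#PBS -N ipengine
#PBS -l walltime=01:00:00
ipengine --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
"""

Because this template lacks '#PBS -q' and '#PBS -t' lines, write_batch_script() inserts '#PBS -q {queue}' and '#PBS -t 1-{n}' after the shebang and then fills in the placeholders with the formatter.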
1048 1048 #SGE is very similar to PBS
1049 1049
1050 1050 class SGELauncher(PBSLauncher):
1051 1051 """Sun GridEngine is a PBS clone with slightly different syntax"""
1052 1052 job_array_regexp = Unicode('#\$\W+\-t')
1053 1053 job_array_template = Unicode('#$ -t 1-{n}')
1054 1054 queue_regexp = Unicode('#\$\W+-q\W+\$?\w+')
1055 1055 queue_template = Unicode('#$ -q {queue}')
1056 1056
1057 1057 class SGEControllerLauncher(SGELauncher, BatchClusterAppMixin):
1058 1058 """Launch a controller using SGE."""
1059 1059
1060 1060 batch_file_name = Unicode(u'sge_controller', config=True,
1061 1061 help="batch file name for the ipontroller job.")
1062 1062 default_template= Unicode(u"""#$ -V
1063 1063 #$ -S /bin/sh
1064 1064 #$ -N ipcontroller
1065 1065 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1066 1066 """%(' '.join(ipcontroller_cmd_argv)))
1067 1067
1068 1068 def start(self):
1069 1069 """Start the controller by profile or profile_dir."""
1070 1070 return super(SGEControllerLauncher, self).start(1)
1071 1071
1072 1072 class SGEEngineSetLauncher(SGELauncher, BatchClusterAppMixin):
1073 1073 """Launch Engines with SGE"""
1074 1074 batch_file_name = Unicode(u'sge_engines', config=True,
1075 1075 help="batch file name for the engine(s) job.")
1076 1076 default_template = Unicode("""#$ -V
1077 1077 #$ -S /bin/sh
1078 1078 #$ -N ipengine
1079 1079 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1080 1080 """%(' '.join(ipengine_cmd_argv)))
1081 1081
1082 1082 def start(self, n):
1083 1083 """Start n engines by profile or profile_dir."""
1084 1084 return super(SGEEngineSetLauncher, self).start(n)
1085 1085
1086 1086
1087 1087 # LSF launchers
1088 1088
1089 1089 class LSFLauncher(BatchSystemLauncher):
1090 1090 """A BatchSystemLauncher subclass for LSF."""
1091 1091
1092 1092 submit_command = List(['bsub'], config=True,
1093 1093 help="The PBS submit command ['bsub']")
1094 1094 delete_command = List(['bkill'], config=True,
1095 1095 help="The PBS delete command ['bkill']")
1096 1096 job_id_regexp = Unicode(r'\d+', config=True,
1097 1097 help="Regular expresion for identifying the job ID [r'\d+']")
1098 1098
1099 1099 batch_file = Unicode(u'')
1100 1100 job_array_regexp = Unicode('#BSUB[ \t]-J+\w+\[\d+-\d+\]')
1101 1101 job_array_template = Unicode('#BSUB -J ipengine[1-{n}]')
1102 1102 queue_regexp = Unicode('#BSUB[ \t]+-q[ \t]+\w+')
1103 1103 queue_template = Unicode('#BSUB -q {queue}')
1104 1104
1105 1105 def start(self, n):
1106 1106 """Start n copies of the process using LSF batch system.
1107 1107         This can't inherit from the base class because bsub expects
1108 1108         to be piped a shell script in order to honor the #BSUB directives:
1109 1109 bsub < script
1110 1110 """
1111 1111         # Here we save profile_dir in the context so it
1112 1112         # can be used in the batch script template as {profile_dir}
1113 1113 self.write_batch_script(n)
1114 1114 #output = check_output(self.args, env=os.environ)
1115 1115 piped_cmd = self.args[0]+'<\"'+self.args[1]+'\"'
1116 1116 self.log.debug("Starting %s: %s", self.__class__.__name__, piped_cmd)
1117 1117 p = Popen(piped_cmd, shell=True,env=os.environ,stdout=PIPE)
1118 1118 output,err = p.communicate()
1119 1119 job_id = self.parse_job_id(output)
1120 1120 self.notify_start(job_id)
1121 1121 return job_id
1122 1122
1123 1123
1124 1124 class LSFControllerLauncher(LSFLauncher, BatchClusterAppMixin):
1125 1125 """Launch a controller using LSF."""
1126 1126
1127 1127 batch_file_name = Unicode(u'lsf_controller', config=True,
1128 1128 help="batch file name for the controller job.")
1129 1129 default_template= Unicode("""#!/bin/sh
1130 1130 #BSUB -J ipcontroller
1131 1131 #BSUB -oo ipcontroller.o.%%J
1132 1132 #BSUB -eo ipcontroller.e.%%J
1133 1133 %s --log-to-file --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1134 1134 """%(' '.join(ipcontroller_cmd_argv)))
1135 1135
1136 1136 def start(self):
1137 1137 """Start the controller by profile or profile_dir."""
1138 1138 return super(LSFControllerLauncher, self).start(1)
1139 1139
1140 1140
1141 1141 class LSFEngineSetLauncher(LSFLauncher, BatchClusterAppMixin):
1142 1142 """Launch Engines using LSF"""
1143 1143 batch_file_name = Unicode(u'lsf_engines', config=True,
1144 1144 help="batch file name for the engine(s) job.")
1145 1145 default_template= Unicode(u"""#!/bin/sh
1146 1146 #BSUB -oo ipengine.o.%%J
1147 1147 #BSUB -eo ipengine.e.%%J
1148 1148 %s --profile-dir="{profile_dir}" --cluster-id="{cluster_id}"
1149 1149 """%(' '.join(ipengine_cmd_argv)))
1150 1150
1151 1151 def start(self, n):
1152 1152 """Start n engines by profile or profile_dir."""
1153 1153 return super(LSFEngineSetLauncher, self).start(n)
1154 1154
1155 1155
1156 1156 #-----------------------------------------------------------------------------
1157 1157 # A launcher for ipcluster itself!
1158 1158 #-----------------------------------------------------------------------------
1159 1159
1160 1160
1161 1161 class IPClusterLauncher(LocalProcessLauncher):
1162 1162 """Launch the ipcluster program in an external process."""
1163 1163
1164 1164 ipcluster_cmd = List(ipcluster_cmd_argv, config=True,
1165 1165 help="Popen command for ipcluster")
1166 1166 ipcluster_args = List(
1167 ['--clean-logs', '--log-to-file', '--log-level=%i'%logging.INFO], config=True,
1167 ['--clean-logs=True', '--log-to-file', '--log-level=%i'%logging.INFO], config=True,
1168 1168 help="Command line arguments to pass to ipcluster.")
1169 1169 ipcluster_subcommand = Unicode('start')
1170 ipcluster_profile = Unicode('default')
1170 1171 ipcluster_n = Integer(2)
1171 1172
1172 1173 def find_args(self):
1173 1174 return self.ipcluster_cmd + [self.ipcluster_subcommand] + \
1174 ['--n=%i'%self.ipcluster_n] + self.ipcluster_args
1175 ['--n=%i'%self.ipcluster_n, '--profile=%s'%self.ipcluster_profile] + \
1176 self.ipcluster_args
1175 1177
1176 1178 def start(self):
1177 1179 return super(IPClusterLauncher, self).start()
1178 1180
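With the ipcluster_profile trait and the --profile flag added in this change, launching a whole cluster as a child process looks roughly like this (the profile name and engine count are invented examples):

# Illustrative only; 'default' and 8 are example values.
cl = IPClusterLauncher(ipcluster_profile='default', ipcluster_n=8)
cl.start()
# find_args() produces roughly:
#   <ipcluster launch command> start --n=8 --profile=default \
#       --clean-logs=True --log-to-file --log-level=20
cl.stop()   # LocalProcessLauncher.stop(): SIGINT, then SIGKILL after a delay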
1179 1181 #-----------------------------------------------------------------------------
1180 1182 # Collections of launchers
1181 1183 #-----------------------------------------------------------------------------
1182 1184
1183 1185 local_launchers = [
1184 1186 LocalControllerLauncher,
1185 1187 LocalEngineLauncher,
1186 1188 LocalEngineSetLauncher,
1187 1189 ]
1188 1190 mpi_launchers = [
1189 1191 MPILauncher,
1190 1192 MPIControllerLauncher,
1191 1193 MPIEngineSetLauncher,
1192 1194 ]
1193 1195 ssh_launchers = [
1194 1196 SSHLauncher,
1195 1197 SSHControllerLauncher,
1196 1198 SSHEngineLauncher,
1197 1199 SSHEngineSetLauncher,
1198 1200 ]
1199 1201 winhpc_launchers = [
1200 1202 WindowsHPCLauncher,
1201 1203 WindowsHPCControllerLauncher,
1202 1204 WindowsHPCEngineSetLauncher,
1203 1205 ]
1204 1206 pbs_launchers = [
1205 1207 PBSLauncher,
1206 1208 PBSControllerLauncher,
1207 1209 PBSEngineSetLauncher,
1208 1210 ]
1209 1211 sge_launchers = [
1210 1212 SGELauncher,
1211 1213 SGEControllerLauncher,
1212 1214 SGEEngineSetLauncher,
1213 1215 ]
1214 1216 lsf_launchers = [
1215 1217 LSFLauncher,
1216 1218 LSFControllerLauncher,
1217 1219 LSFEngineSetLauncher,
1218 1220 ]
1219 1221 all_launchers = local_launchers + mpi_launchers + ssh_launchers + winhpc_launchers\
1220 1222 + pbs_launchers + sge_launchers + lsf_launchers
1221 1223