Merge pull request #7832 from minrk/default-secure-session...
Matthias Bussonnier
r20576:478a8026 merge
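This merge removes the explicit `default_secure(self.config)` calls (and their imports) from the console and notebook apps: message signing is now assumed to be Session's own default, so the helper is redundant. For context, a minimal sketch of what the removed helper did, reconstructed from its use in this diff; the body is an assumption, not the exact IPython source:

    import uuid
    from IPython.utils.py3compat import str_to_bytes

    def default_secure(cfg):
        # assumed behaviour: only fill in a key if none was configured
        if 'Session' in cfg and ('key' in cfg.Session or 'keyfile' in cfg.Session):
            return
        # no key/keyfile specified: sign messages with a fresh random key
        cfg.Session.key = str_to_bytes(str(uuid.uuid4()))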
@@ -1,345 +1,344 @@
1 1 """ A minimal application base mixin for all ZMQ based IPython frontends.
2 2
3 3 This is not a complete console app, as subprocess will not be able to receive
4 4 input, there is no real readline support, among other limitations. This is a
5 5 refactoring of what used to be the IPython/qt/console/qtconsoleapp.py
6 6 """
7 7 # Copyright (c) IPython Development Team.
8 8 # Distributed under the terms of the Modified BSD License.
9 9
10 10 import atexit
11 11 import os
12 12 import signal
13 13 import sys
14 14 import uuid
15 15
16 16
17 17 from IPython.config.application import boolean_flag
18 18 from IPython.core.profiledir import ProfileDir
19 19 from IPython.kernel.blocking import BlockingKernelClient
20 20 from IPython.kernel import KernelManager
21 21 from IPython.kernel import tunnel_to_kernel, find_connection_file, swallow_argv
22 22 from IPython.kernel.kernelspec import NoSuchKernel
23 23 from IPython.utils.path import filefind
24 24 from IPython.utils.traitlets import (
25 25 Dict, List, Unicode, CUnicode, CBool, Any
26 26 )
27 27 from IPython.kernel.zmq.kernelapp import (
28 28 kernel_flags,
29 29 kernel_aliases,
30 30 IPKernelApp
31 31 )
32 32 from IPython.kernel.zmq.pylab.config import InlineBackend
33 from IPython.kernel.zmq.session import Session, default_secure
33 from IPython.kernel.zmq.session import Session
34 34 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
35 35 from IPython.kernel.connect import ConnectionFileMixin
36 36
37 37 from IPython.utils.localinterfaces import localhost
38 38
39 39 #-----------------------------------------------------------------------------
40 40 # Aliases and Flags
41 41 #-----------------------------------------------------------------------------
42 42
43 43 flags = dict(kernel_flags)
44 44
45 45 # the flags that are specific to the frontend
46 46 # these must be scrubbed before being passed to the kernel,
47 47 # or the kernel will raise an error on unrecognized flags
48 48 app_flags = {
49 49 'existing' : ({'IPythonConsoleApp' : {'existing' : 'kernel*.json'}},
50 50 "Connect to an existing kernel. If no argument specified, guess most recent"),
51 51 }
52 52 app_flags.update(boolean_flag(
53 53 'confirm-exit', 'IPythonConsoleApp.confirm_exit',
54 54 """Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
55 55 to force a direct exit without any confirmation.
56 56 """,
57 57 """Don't prompt the user when exiting. This will terminate the kernel
58 58 if it is owned by the frontend, and leave it alive if it is external.
59 59 """
60 60 ))
61 61 flags.update(app_flags)
62 62
63 63 aliases = dict(kernel_aliases)
64 64
65 65 # also scrub aliases from the frontend
66 66 app_aliases = dict(
67 67 ip = 'IPythonConsoleApp.ip',
68 68 transport = 'IPythonConsoleApp.transport',
69 69 hb = 'IPythonConsoleApp.hb_port',
70 70 shell = 'IPythonConsoleApp.shell_port',
71 71 iopub = 'IPythonConsoleApp.iopub_port',
72 72 stdin = 'IPythonConsoleApp.stdin_port',
73 73 existing = 'IPythonConsoleApp.existing',
74 74 f = 'IPythonConsoleApp.connection_file',
75 75
76 76 kernel = 'IPythonConsoleApp.kernel_name',
77 77
78 78 ssh = 'IPythonConsoleApp.sshserver',
79 79 )
80 80 aliases.update(app_aliases)
81 81
82 82 #-----------------------------------------------------------------------------
83 83 # Classes
84 84 #-----------------------------------------------------------------------------
85 85
86 86 classes = [KernelManager, ProfileDir, Session]
87 87
88 88 class IPythonConsoleApp(ConnectionFileMixin):
89 89 name = 'ipython-console-mixin'
90 90
91 91 description = """
92 92 The IPython Mixin Console.
93 93
94 94 This class contains the common portions of console client (QtConsole,
95 95 ZMQ-based terminal console, etc). It is not a full console, in that
96 96 launched terminal subprocesses will not be able to accept input.
97 97
98 98 The console using this mixin supports various extra features beyond
99 99 the single-process Terminal IPython shell, such as connecting to an
100 100 existing kernel, via:
101 101
102 102 ipython <appname> --existing
103 103
104 104 as well as tunneling via SSH
105 105
106 106 """
107 107
108 108 classes = classes
109 109 flags = Dict(flags)
110 110 aliases = Dict(aliases)
111 111 kernel_manager_class = KernelManager
112 112 kernel_client_class = BlockingKernelClient
113 113
114 114 kernel_argv = List(Unicode)
115 115 # frontend flags & aliases to be stripped when building kernel_argv
116 116 frontend_flags = Any(app_flags)
117 117 frontend_aliases = Any(app_aliases)
118 118
119 119 # create requested profiles by default, if they don't exist:
120 120 auto_create = CBool(True)
121 121 # connection info:
122 122
123 123 sshserver = Unicode('', config=True,
124 124 help="""The SSH server to use to connect to the kernel.""")
125 125 sshkey = Unicode('', config=True,
126 126 help="""Path to the ssh key to use for logging in to the ssh server.""")
127 127
128 128 def _connection_file_default(self):
129 129 return 'kernel-%i.json' % os.getpid()
130 130
131 131 existing = CUnicode('', config=True,
132 132 help="""Connect to an already running kernel""")
133 133
134 134 kernel_name = Unicode('python', config=True,
135 135 help="""The name of the default kernel to start.""")
136 136
137 137 confirm_exit = CBool(True, config=True,
138 138 help="""
139 139 Set to display confirmation dialog on exit. You can always use 'exit' or 'quit'
140 140 to force a direct exit without any confirmation.""",
141 141 )
142 142
143 143 @property
144 144 def help_classes(self):
145 145 """ConsoleApps can configure kernels on the command-line
146 146
147 147 But this shouldn't be written to a file
148 148 """
149 149 return self.classes + [IPKernelApp] + IPKernelApp.classes
150 150
151 151 def build_kernel_argv(self, argv=None):
152 152 """build argv to be passed to kernel subprocess"""
153 153 if argv is None:
154 154 argv = sys.argv[1:]
155 155 self.kernel_argv = swallow_argv(argv, self.frontend_aliases, self.frontend_flags)
156 156
157 157 def init_connection_file(self):
158 158 """find the connection file, and load the info if found.
159 159
160 160 The current working directory and the current profile's security
161 161 directory will be searched for the file if it is not given by
162 162 absolute path.
163 163
164 164 When attempting to connect to an existing kernel and the `--existing`
165 165 argument does not match an existing file, it will be interpreted as a
166 166 fileglob, and the matching file in the current profile's security dir
167 167 with the latest access time will be used.
168 168
169 169 After this method is called, self.connection_file contains the *full path*
170 170 to the connection file, never just its name.
171 171 """
172 172 if self.existing:
173 173 try:
174 174 cf = find_connection_file(self.existing)
175 175 except Exception:
176 176 self.log.critical("Could not find existing kernel connection file %s", self.existing)
177 177 self.exit(1)
178 178 self.log.debug("Connecting to existing kernel: %s" % cf)
179 179 self.connection_file = cf
180 180 else:
181 181 # not existing, check if we are going to write the file
182 182 # and ensure that self.connection_file is a full path, not just the shortname
183 183 try:
184 184 cf = find_connection_file(self.connection_file)
185 185 except Exception:
186 186 # file might not exist
187 187 if self.connection_file == os.path.basename(self.connection_file):
188 188 # just shortname, put it in security dir
189 189 cf = os.path.join(self.profile_dir.security_dir, self.connection_file)
190 190 else:
191 191 cf = self.connection_file
192 192 self.connection_file = cf
193 193 try:
194 194 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
195 195 except IOError:
196 196 self.log.debug("Connection File not found: %s", self.connection_file)
197 197 return
198 198
199 199 # should load_connection_file only be used for existing?
200 200 # as it is now, this allows reusing ports if an existing
201 201 # file is requested
202 202 try:
203 203 self.load_connection_file()
204 204 except Exception:
205 205 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
206 206 self.exit(1)
207 207
208 208 def init_ssh(self):
209 209 """set up ssh tunnels, if needed."""
210 210 if not self.existing or (not self.sshserver and not self.sshkey):
211 211 return
212 212 self.load_connection_file()
213 213
214 214 transport = self.transport
215 215 ip = self.ip
216 216
217 217 if transport != 'tcp':
218 218 self.log.error("Can only use ssh tunnels with TCP sockets, not %s", transport)
219 219 sys.exit(-1)
220 220
221 221 if self.sshkey and not self.sshserver:
222 222 # specifying just the key implies that we are connecting directly
223 223 self.sshserver = ip
224 224 ip = localhost()
225 225
226 226 # build connection dict for tunnels:
227 227 info = dict(ip=ip,
228 228 shell_port=self.shell_port,
229 229 iopub_port=self.iopub_port,
230 230 stdin_port=self.stdin_port,
231 231 hb_port=self.hb_port
232 232 )
233 233
234 234 self.log.info("Forwarding connections to %s via %s"%(ip, self.sshserver))
235 235
236 236 # tunnels return a new set of ports, which will be on localhost:
237 237 self.ip = localhost()
238 238 try:
239 239 newports = tunnel_to_kernel(info, self.sshserver, self.sshkey)
240 240 except:
241 241 # even catch KeyboardInterrupt
242 242 self.log.error("Could not setup tunnels", exc_info=True)
243 243 self.exit(1)
244 244
245 245 self.shell_port, self.iopub_port, self.stdin_port, self.hb_port = newports
246 246
247 247 cf = self.connection_file
248 248 base,ext = os.path.splitext(cf)
249 249 base = os.path.basename(base)
250 250 self.connection_file = os.path.basename(base)+'-ssh'+ext
251 251 self.log.info("To connect another client via this tunnel, use:")
252 252 self.log.info("--existing %s" % self.connection_file)
253 253
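For reference, `tunnel_to_kernel` (imported above) takes the connection dict built here plus the SSH server and optional key, and returns the four forwarded local ports in the order they are unpacked below. A hedged usage sketch; the host, key path, and ports are made up:

    from IPython.kernel import tunnel_to_kernel

    info = dict(ip='10.0.0.5', shell_port=51234, iopub_port=51235,
                stdin_port=51236, hb_port=51237)
    # each returned port is a localhost port forwarded to the remote kernel
    shell, iopub, stdin, hb = tunnel_to_kernel(info, 'user@gateway', '~/.ssh/id_rsa')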
254 254 def _new_connection_file(self):
255 255 cf = ''
256 256 while not cf:
257 257 # we don't need a 128b id to distinguish kernels, use more readable
258 258 # 48b node segment (12 hex chars). Users running more than 32k simultaneous
259 259 # kernels can subclass.
260 260 ident = str(uuid.uuid4()).split('-')[-1]
261 261 cf = os.path.join(self.profile_dir.security_dir, 'kernel-%s.json' % ident)
262 262 # only keep if it's actually new. Protect against unlikely collision
263 263 # in 48b random search space
264 264 cf = cf if not os.path.exists(cf) else ''
265 265 return cf
266 266
267 267 def init_kernel_manager(self):
268 268 # Don't let Qt or ZMQ swallow KeyboardInterrupts.
269 269 if self.existing:
270 270 self.kernel_manager = None
271 271 return
272 272 signal.signal(signal.SIGINT, signal.SIG_DFL)
273 273
274 274 # Create a KernelManager and start a kernel.
275 275 try:
276 276 self.kernel_manager = self.kernel_manager_class(
277 277 ip=self.ip,
278 278 session=self.session,
279 279 transport=self.transport,
280 280 shell_port=self.shell_port,
281 281 iopub_port=self.iopub_port,
282 282 stdin_port=self.stdin_port,
283 283 hb_port=self.hb_port,
284 284 connection_file=self.connection_file,
285 285 kernel_name=self.kernel_name,
286 286 parent=self,
287 287 ipython_dir=self.ipython_dir,
288 288 )
289 289 except NoSuchKernel:
290 290 self.log.critical("Could not find kernel %s", self.kernel_name)
291 291 self.exit(1)
292 292
293 293 self.kernel_manager.client_factory = self.kernel_client_class
294 294 # FIXME: remove special treatment of IPython kernels
295 295 kwargs = {}
296 296 if self.kernel_manager.ipython_kernel:
297 297 kwargs['extra_arguments'] = self.kernel_argv
298 298 self.kernel_manager.start_kernel(**kwargs)
299 299 atexit.register(self.kernel_manager.cleanup_ipc_files)
300 300
301 301 if self.sshserver:
302 302 # ssh, write new connection file
303 303 self.kernel_manager.write_connection_file()
304 304
305 305 # in case KM defaults / ssh writing changes things:
306 306 km = self.kernel_manager
307 307 self.shell_port=km.shell_port
308 308 self.iopub_port=km.iopub_port
309 309 self.stdin_port=km.stdin_port
310 310 self.hb_port=km.hb_port
311 311 self.connection_file = km.connection_file
312 312
313 313 atexit.register(self.kernel_manager.cleanup_connection_file)
314 314
315 315 def init_kernel_client(self):
316 316 if self.kernel_manager is not None:
317 317 self.kernel_client = self.kernel_manager.client()
318 318 else:
319 319 self.kernel_client = self.kernel_client_class(
320 320 session=self.session,
321 321 ip=self.ip,
322 322 transport=self.transport,
323 323 shell_port=self.shell_port,
324 324 iopub_port=self.iopub_port,
325 325 stdin_port=self.stdin_port,
326 326 hb_port=self.hb_port,
327 327 connection_file=self.connection_file,
328 328 parent=self,
329 329 )
330 330
331 331 self.kernel_client.start_channels()
332 332
333 333
334 334
335 335 def initialize(self, argv=None):
336 336 """
337 337 Classes which mix this class in should call:
338 338 IPythonConsoleApp.initialize(self,argv)
339 339 """
340 340 self.init_connection_file()
341 default_secure(self.config)
342 341 self.init_ssh()
343 342 self.init_kernel_manager()
344 343 self.init_kernel_client()
345 344
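The removal of `default_secure(self.config)` above relies on the other half of this change: a Session is now expected to generate a signing key on its own. A quick interactive check, assuming secure-by-default Session behaviour:

    from IPython.kernel.zmq.session import Session

    s = Session()
    assert s.key, "a Session should carry a non-empty signing key by default"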
@@ -1,1118 +1,1115 @@
1 1 # coding: utf-8
2 2 """A tornado based IPython notebook server."""
3 3
4 4 # Copyright (c) IPython Development Team.
5 5 # Distributed under the terms of the Modified BSD License.
6 6
7 7 from __future__ import print_function
8 8
9 9 import base64
10 10 import datetime
11 11 import errno
12 12 import importlib
13 13 import io
14 14 import json
15 15 import logging
16 16 import os
17 17 import random
18 18 import re
19 19 import select
20 20 import signal
21 21 import socket
22 22 import sys
23 23 import threading
24 24 import webbrowser
25 25
26 26
27 27 # check for pyzmq
28 28 from IPython.utils.zmqrelated import check_for_zmq
29 29 check_for_zmq('13', 'IPython.html')
30 30
31 31 from jinja2 import Environment, FileSystemLoader
32 32
33 33 # Install the pyzmq ioloop. This has to be done before anything else from
34 34 # tornado is imported.
35 35 from zmq.eventloop import ioloop
36 36 ioloop.install()
37 37
38 38 # check for tornado 4.0
39 39 msg = "The IPython Notebook requires tornado >= 4.0"
40 40 try:
41 41 import tornado
42 42 except ImportError:
43 43 raise ImportError(msg)
44 44 try:
45 45 version_info = tornado.version_info
46 46 except AttributeError:
47 47 raise ImportError(msg + ", but you have < 1.1.0")
48 48 if version_info < (4,0):
49 49 raise ImportError(msg + ", but you have %s" % tornado.version)
50 50
51 51 from tornado import httpserver
52 52 from tornado import web
53 53 from tornado.log import LogFormatter, app_log, access_log, gen_log
54 54
55 55 from IPython.html import (
56 56 DEFAULT_STATIC_FILES_PATH,
57 57 DEFAULT_TEMPLATE_PATH_LIST,
58 58 )
59 59 from .base.handlers import Template404
60 60 from .log import log_request
61 61 from .services.kernels.kernelmanager import MappingKernelManager
62 62 from .services.config import ConfigManager
63 63 from .services.contents.manager import ContentsManager
64 64 from .services.contents.filemanager import FileContentsManager
65 65 from .services.clusters.clustermanager import ClusterManager
66 66 from .services.sessions.sessionmanager import SessionManager
67 67
68 68 from .auth.login import LoginHandler
69 69 from .auth.logout import LogoutHandler
70 70 from .base.handlers import IPythonHandler, FileFindHandler
71 71
72 72 from IPython.config import Config
73 73 from IPython.config.application import catch_config_error, boolean_flag
74 74 from IPython.core.application import (
75 75 BaseIPythonApplication, base_flags, base_aliases,
76 76 )
77 77 from IPython.core.profiledir import ProfileDir
78 78 from IPython.kernel import KernelManager
79 79 from IPython.kernel.kernelspec import KernelSpecManager
80 from IPython.kernel.zmq.session import default_secure, Session
80 from IPython.kernel.zmq.session import Session
81 81 from IPython.nbformat.sign import NotebookNotary
82 82 from IPython.utils.importstring import import_item
83 83 from IPython.utils import submodule
84 84 from IPython.utils.process import check_pid
85 85 from IPython.utils.traitlets import (
86 86 Dict, Unicode, Integer, List, Bool, Bytes, Instance,
87 87 TraitError, Type,
88 88 )
89 89 from IPython.utils import py3compat
90 90 from IPython.utils.path import filefind, get_ipython_dir
91 91 from IPython.utils.sysinfo import get_sys_info
92 92
93 93 from .nbextensions import SYSTEM_NBEXTENSIONS_DIRS
94 94 from .utils import url_path_join
95 95
96 96 #-----------------------------------------------------------------------------
97 97 # Module globals
98 98 #-----------------------------------------------------------------------------
99 99
100 100 _examples = """
101 101 ipython notebook # start the notebook
102 102 ipython notebook --profile=sympy # use the sympy profile
103 103 ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
104 104 """
105 105
106 106 #-----------------------------------------------------------------------------
107 107 # Helper functions
108 108 #-----------------------------------------------------------------------------
109 109
110 110 def random_ports(port, n):
111 111 """Generate a list of n random ports near the given port.
112 112
113 113 The first 5 ports will be sequential, and the remaining n-5 will be
114 114 randomly selected in the range [port-2*n, port+2*n].
115 115 """
116 116 for i in range(min(5, n)):
117 117 yield port + i
118 118 for i in range(n-5):
119 119 yield max(1, port + random.randint(-2*n, 2*n))
120 120
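A small illustration of the generator above: the first five candidates are sequential, the remaining n-5 are drawn from [port-2*n, port+2*n] (the random values below are illustrative only):

    >>> list(random_ports(8888, 8))
    [8888, 8889, 8890, 8891, 8892, 8874, 8901, 8880]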
121 121 def load_handlers(name):
122 122 """Load the (URL pattern, handler) tuples for each component."""
123 123 name = 'IPython.html.' + name
124 124 mod = __import__(name, fromlist=['default_handlers'])
125 125 return mod.default_handlers
126 126
127 127 #-----------------------------------------------------------------------------
128 128 # The Tornado web application
129 129 #-----------------------------------------------------------------------------
130 130
131 131 class NotebookWebApplication(web.Application):
132 132
133 133 def __init__(self, ipython_app, kernel_manager, contents_manager,
134 134 cluster_manager, session_manager, kernel_spec_manager,
135 135 config_manager, log,
136 136 base_url, default_url, settings_overrides, jinja_env_options):
137 137
138 138 settings = self.init_settings(
139 139 ipython_app, kernel_manager, contents_manager, cluster_manager,
140 140 session_manager, kernel_spec_manager, config_manager, log, base_url,
141 141 default_url, settings_overrides, jinja_env_options)
142 142 handlers = self.init_handlers(settings)
143 143
144 144 super(NotebookWebApplication, self).__init__(handlers, **settings)
145 145
146 146 def init_settings(self, ipython_app, kernel_manager, contents_manager,
147 147 cluster_manager, session_manager, kernel_spec_manager,
148 148 config_manager,
149 149 log, base_url, default_url, settings_overrides,
150 150 jinja_env_options=None):
151 151
152 152 _template_path = settings_overrides.get(
153 153 "template_path",
154 154 ipython_app.template_file_path,
155 155 )
156 156 if isinstance(_template_path, str):
157 157 _template_path = (_template_path,)
158 158 template_path = [os.path.expanduser(path) for path in _template_path]
159 159
160 160 jenv_opt = jinja_env_options if jinja_env_options else {}
161 161 env = Environment(loader=FileSystemLoader(template_path), **jenv_opt)
162 162
163 163 sys_info = get_sys_info()
164 164 if sys_info['commit_source'] == 'repository':
165 165 # don't cache (rely on 304) when working from master
166 166 version_hash = ''
167 167 else:
168 168 # reset the cache on server restart
169 169 version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
170 170
171 171 settings = dict(
172 172 # basics
173 173 log_function=log_request,
174 174 base_url=base_url,
175 175 default_url=default_url,
176 176 template_path=template_path,
177 177 static_path=ipython_app.static_file_path,
178 178 static_handler_class = FileFindHandler,
179 179 static_url_prefix = url_path_join(base_url,'/static/'),
180 180 static_handler_args = {
181 181 # don't cache custom.js
182 182 'no_cache_paths': [url_path_join(base_url, 'static', 'custom')],
183 183 },
184 184 version_hash=version_hash,
185 185
186 186 # authentication
187 187 cookie_secret=ipython_app.cookie_secret,
188 188 login_url=url_path_join(base_url,'/login'),
189 189 login_handler_class=ipython_app.login_handler_class,
190 190 logout_handler_class=ipython_app.logout_handler_class,
191 191 password=ipython_app.password,
192 192
193 193 # managers
194 194 kernel_manager=kernel_manager,
195 195 contents_manager=contents_manager,
196 196 cluster_manager=cluster_manager,
197 197 session_manager=session_manager,
198 198 kernel_spec_manager=kernel_spec_manager,
199 199 config_manager=config_manager,
200 200
201 201 # IPython stuff
202 202 nbextensions_path=ipython_app.nbextensions_path,
203 203 websocket_url=ipython_app.websocket_url,
204 204 mathjax_url=ipython_app.mathjax_url,
205 205 config=ipython_app.config,
206 206 jinja2_env=env,
207 207 terminals_available=False, # Set later if terminals are available
208 208 )
209 209
210 210 # allow custom overrides for the tornado web app.
211 211 settings.update(settings_overrides)
212 212 return settings
213 213
214 214 def init_handlers(self, settings):
215 215 """Load the (URL pattern, handler) tuples for each component."""
216 216
217 217 # Order matters. The first handler to match the URL will handle the request.
218 218 handlers = []
219 219 handlers.extend(load_handlers('tree.handlers'))
220 220 handlers.extend([(r"/login", settings['login_handler_class'])])
221 221 handlers.extend([(r"/logout", settings['logout_handler_class'])])
222 222 handlers.extend(load_handlers('files.handlers'))
223 223 handlers.extend(load_handlers('notebook.handlers'))
224 224 handlers.extend(load_handlers('nbconvert.handlers'))
225 225 handlers.extend(load_handlers('kernelspecs.handlers'))
226 226 handlers.extend(load_handlers('edit.handlers'))
227 227 handlers.extend(load_handlers('services.config.handlers'))
228 228 handlers.extend(load_handlers('services.kernels.handlers'))
229 229 handlers.extend(load_handlers('services.contents.handlers'))
230 230 handlers.extend(load_handlers('services.clusters.handlers'))
231 231 handlers.extend(load_handlers('services.sessions.handlers'))
232 232 handlers.extend(load_handlers('services.nbconvert.handlers'))
233 233 handlers.extend(load_handlers('services.kernelspecs.handlers'))
234 234 handlers.extend(load_handlers('services.security.handlers'))
235 235 handlers.append(
236 236 (r"/nbextensions/(.*)", FileFindHandler, {
237 237 'path': settings['nbextensions_path'],
238 238 'no_cache_paths': ['/'], # don't cache anything in nbextensions
239 239 }),
240 240 )
241 241 # register base handlers last
242 242 handlers.extend(load_handlers('base.handlers'))
243 243 # set the URL that will be redirected from `/`
244 244 handlers.append(
245 245 (r'/?', web.RedirectHandler, {
246 246 'url' : settings['default_url'],
247 247 'permanent': False, # want 302, not 301
248 248 })
249 249 )
250 250 # prepend base_url onto the patterns that we match
251 251 new_handlers = []
252 252 for handler in handlers:
253 253 pattern = url_path_join(settings['base_url'], handler[0])
254 254 new_handler = tuple([pattern] + list(handler[1:]))
255 255 new_handlers.append(new_handler)
256 256 # add 404 on the end, which will catch everything that falls through
257 257 new_handlers.append((r'(.*)', Template404))
258 258 return new_handlers
259 259
260 260
261 261 class NbserverListApp(BaseIPythonApplication):
262 262
263 263 description="List currently running notebook servers in this profile."
264 264
265 265 flags = dict(
266 266 json=({'NbserverListApp': {'json': True}},
267 267 "Produce machine-readable JSON output."),
268 268 )
269 269
270 270 json = Bool(False, config=True,
271 271 help="If True, each line of output will be a JSON object with the "
272 272 "details from the server info file.")
273 273
274 274 def start(self):
275 275 if not self.json:
276 276 print("Currently running servers:")
277 277 for serverinfo in list_running_servers(self.profile):
278 278 if self.json:
279 279 print(json.dumps(serverinfo))
280 280 else:
281 281 print(serverinfo['url'], "::", serverinfo['notebook_dir'])
282 282
283 283 #-----------------------------------------------------------------------------
284 284 # Aliases and Flags
285 285 #-----------------------------------------------------------------------------
286 286
287 287 flags = dict(base_flags)
288 288 flags['no-browser']=(
289 289 {'NotebookApp' : {'open_browser' : False}},
290 290 "Don't open the notebook in a browser after startup."
291 291 )
292 292 flags['pylab']=(
293 293 {'NotebookApp' : {'pylab' : 'warn'}},
294 294 "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib."
295 295 )
296 296 flags['no-mathjax']=(
297 297 {'NotebookApp' : {'enable_mathjax' : False}},
298 298 """Disable MathJax
299 299
300 300 MathJax is the javascript library IPython uses to render math/LaTeX. It is
301 301 very large, so you may want to disable it if you have a slow internet
302 302 connection, or for offline use of the notebook.
303 303
304 304 When disabled, equations etc. will appear as their untransformed TeX source.
305 305 """
306 306 )
307 307
308 308 # Add notebook manager flags
309 309 flags.update(boolean_flag('script', 'FileContentsManager.save_script',
310 310 'DEPRECATED, IGNORED',
311 311 'DEPRECATED, IGNORED'))
312 312
313 313 aliases = dict(base_aliases)
314 314
315 315 aliases.update({
316 316 'ip': 'NotebookApp.ip',
317 317 'port': 'NotebookApp.port',
318 318 'port-retries': 'NotebookApp.port_retries',
319 319 'transport': 'KernelManager.transport',
320 320 'keyfile': 'NotebookApp.keyfile',
321 321 'certfile': 'NotebookApp.certfile',
322 322 'notebook-dir': 'NotebookApp.notebook_dir',
323 323 'browser': 'NotebookApp.browser',
324 324 'pylab': 'NotebookApp.pylab',
325 325 })
326 326
327 327 #-----------------------------------------------------------------------------
328 328 # NotebookApp
329 329 #-----------------------------------------------------------------------------
330 330
331 331 class NotebookApp(BaseIPythonApplication):
332 332
333 333 name = 'ipython-notebook'
334 334
335 335 description = """
336 336 The IPython HTML Notebook.
337 337
338 338 This launches a Tornado based HTML Notebook Server that serves up an
339 339 HTML5/Javascript Notebook client.
340 340 """
341 341 examples = _examples
342 342 aliases = aliases
343 343 flags = flags
344 344
345 345 classes = [
346 346 KernelManager, ProfileDir, Session, MappingKernelManager,
347 347 ContentsManager, FileContentsManager, NotebookNotary,
348 348 KernelSpecManager,
349 349 ]
350 350 flags = Dict(flags)
351 351 aliases = Dict(aliases)
352 352
353 353 subcommands = dict(
354 354 list=(NbserverListApp, NbserverListApp.description.splitlines()[0]),
355 355 )
356 356
357 357 ipython_kernel_argv = List(Unicode)
358 358
359 359 _log_formatter_cls = LogFormatter
360 360
361 361 def _log_level_default(self):
362 362 return logging.INFO
363 363
364 364 def _log_datefmt_default(self):
365 365 """Exclude date from default date format"""
366 366 return "%H:%M:%S"
367 367
368 368 def _log_format_default(self):
369 369 """override default log format to include time"""
370 370 return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s"
371 371
372 372 # create requested profiles by default, if they don't exist:
373 373 auto_create = Bool(True)
374 374
375 375 # file to be opened in the notebook server
376 376 file_to_run = Unicode('', config=True)
377 377
378 378 # Network related information
379 379
380 380 allow_origin = Unicode('', config=True,
381 381 help="""Set the Access-Control-Allow-Origin header
382 382
383 383 Use '*' to allow any origin to access your server.
384 384
385 385 Takes precedence over allow_origin_pat.
386 386 """
387 387 )
388 388
389 389 allow_origin_pat = Unicode('', config=True,
390 390 help="""Use a regular expression for the Access-Control-Allow-Origin header
391 391
392 392 Requests from an origin matching the expression will get replies with:
393 393
394 394 Access-Control-Allow-Origin: origin
395 395
396 396 where `origin` is the origin of the request.
397 397
398 398 Ignored if allow_origin is set.
399 399 """
400 400 )
401 401
402 402 allow_credentials = Bool(False, config=True,
403 403 help="Set the Access-Control-Allow-Credentials: true header"
404 404 )
405 405
406 406 default_url = Unicode('/tree', config=True,
407 407 help="The default URL to redirect to from `/`"
408 408 )
409 409
410 410 ip = Unicode('localhost', config=True,
411 411 help="The IP address the notebook server will listen on."
412 412 )
413 413
414 414 def _ip_changed(self, name, old, new):
415 415 if new == u'*': self.ip = u''
416 416
417 417 port = Integer(8888, config=True,
418 418 help="The port the notebook server will listen on."
419 419 )
420 420 port_retries = Integer(50, config=True,
421 421 help="The number of additional ports to try if the specified port is not available."
422 422 )
423 423
424 424 certfile = Unicode(u'', config=True,
425 425 help="""The full path to an SSL/TLS certificate file."""
426 426 )
427 427
428 428 keyfile = Unicode(u'', config=True,
429 429 help="""The full path to a private key file for usage with SSL/TLS."""
430 430 )
431 431
432 432 cookie_secret_file = Unicode(config=True,
433 433 help="""The file where the cookie secret is stored."""
434 434 )
435 435 def _cookie_secret_file_default(self):
436 436 if self.profile_dir is None:
437 437 return ''
438 438 return os.path.join(self.profile_dir.security_dir, 'notebook_cookie_secret')
439 439
440 440 cookie_secret = Bytes(b'', config=True,
441 441 help="""The random bytes used to secure cookies.
442 442 By default this is a new random number every time you start the Notebook.
443 443 Set it to a value in a config file to enable logins to persist across server sessions.
444 444
445 445 Note: Cookie secrets should be kept private; do not share config files with
446 446 cookie_secret stored in plaintext (you can read the value from a file).
447 447 """
448 448 )
449 449 def _cookie_secret_default(self):
450 450 if os.path.exists(self.cookie_secret_file):
451 451 with io.open(self.cookie_secret_file, 'rb') as f:
452 452 return f.read()
453 453 else:
454 454 secret = base64.encodestring(os.urandom(1024))
455 455 self._write_cookie_secret_file(secret)
456 456 return secret
457 457
458 458 def _write_cookie_secret_file(self, secret):
459 459 """write my secret to my secret_file"""
460 460 self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file)
461 461 with io.open(self.cookie_secret_file, 'wb') as f:
462 462 f.write(secret)
463 463 try:
464 464 os.chmod(self.cookie_secret_file, 0o600)
465 465 except OSError:
466 466 self.log.warn(
467 467 "Could not set permissions on %s",
468 468 self.cookie_secret_file
469 469 )
470 470
471 471 password = Unicode(u'', config=True,
472 472 help="""Hashed password to use for web authentication.
473 473
474 474 To generate, type in a python/IPython shell:
475 475
476 476 from IPython.lib import passwd; passwd()
477 477
478 478 The string should be of the form type:salt:hashed-password.
479 479 """
480 480 )
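For example, generate the hash once in an IPython shell and paste it into the profile's config file (the hash below is a placeholder):

    from IPython.lib import passwd
    passwd('s3cret')  # -> u'sha1:<salt>:<hash>'

    # then, in ipython_notebook_config.py:
    # c.NotebookApp.password = u'sha1:<salt>:<hash>'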
481 481
482 482 open_browser = Bool(True, config=True,
483 483 help="""Whether to open in a browser after starting.
484 484 The specific browser used is platform dependent and
485 485 determined by the python standard library `webbrowser`
486 486 module, unless it is overridden using the --browser
487 487 (NotebookApp.browser) configuration option.
488 488 """)
489 489
490 490 browser = Unicode(u'', config=True,
491 491 help="""Specify what command to use to invoke a web
492 492 browser when opening the notebook. If not specified, the
493 493 default browser will be determined by the `webbrowser`
494 494 standard library module, which allows setting of the
495 495 BROWSER environment variable to override it.
496 496 """)
497 497
498 498 webapp_settings = Dict(config=True,
499 499 help="DEPRECATED, use tornado_settings"
500 500 )
501 501 def _webapp_settings_changed(self, name, old, new):
502 502 self.log.warn("\n webapp_settings is deprecated, use tornado_settings.\n")
503 503 self.tornado_settings = new
504 504
505 505 tornado_settings = Dict(config=True,
506 506 help="Supply overrides for the tornado.web.Application that the "
507 507 "IPython notebook uses.")
508 508
509 509 ssl_options = Dict(config=True,
510 510 help="""Supply SSL options for the tornado HTTPServer.
511 511 See the tornado docs for details.""")
512 512
513 513 jinja_environment_options = Dict(config=True,
514 514 help="Supply extra arguments that will be passed to Jinja environment.")
515 515
516 516 enable_mathjax = Bool(True, config=True,
517 517 help="""Whether to enable MathJax for typesetting math/TeX
518 518
519 519 MathJax is the javascript library IPython uses to render math/LaTeX. It is
520 520 very large, so you may want to disable it if you have a slow internet
521 521 connection, or for offline use of the notebook.
522 522
523 523 When disabled, equations etc. will appear as their untransformed TeX source.
524 524 """
525 525 )
526 526 def _enable_mathjax_changed(self, name, old, new):
527 527 """set mathjax url to empty if mathjax is disabled"""
528 528 if not new:
529 529 self.mathjax_url = u''
530 530
531 531 base_url = Unicode('/', config=True,
532 532 help='''The base URL for the notebook server.
533 533
534 534 Leading and trailing slashes can be omitted,
535 535 and will automatically be added.
536 536 ''')
537 537 def _base_url_changed(self, name, old, new):
538 538 if not new.startswith('/'):
539 539 self.base_url = '/'+new
540 540 elif not new.endswith('/'):
541 541 self.base_url = new+'/'
542 542
543 543 base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""")
544 544 def _base_project_url_changed(self, name, old, new):
545 545 self.log.warn("base_project_url is deprecated, use base_url")
546 546 self.base_url = new
547 547
548 548 extra_static_paths = List(Unicode, config=True,
549 549 help="""Extra paths to search for serving static files.
550 550
551 551 This allows adding javascript/css to be available from the notebook server machine,
552 552 or overriding individual files in IPython's built-in static files."""
553 553 )
554 554 def _extra_static_paths_default(self):
555 555 return [os.path.join(self.profile_dir.location, 'static')]
556 556
557 557 @property
558 558 def static_file_path(self):
559 559 """return extra paths + the default location"""
560 560 return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]
561 561
562 562 extra_template_paths = List(Unicode, config=True,
563 563 help="""Extra paths to search for serving jinja templates.
564 564
565 565 Can be used to override templates from IPython.html.templates."""
566 566 )
567 567 def _extra_template_paths_default(self):
568 568 return []
569 569
570 570 @property
571 571 def template_file_path(self):
572 572 """return extra paths + the default locations"""
573 573 return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST
574 574
575 575 extra_nbextensions_path = List(Unicode, config=True,
576 576 help="""extra paths to look for Javascript notebook extensions"""
577 577 )
578 578
579 579 @property
580 580 def nbextensions_path(self):
581 581 """The path to look for Javascript notebook extensions"""
582 582 return self.extra_nbextensions_path + [os.path.join(get_ipython_dir(), 'nbextensions')] + SYSTEM_NBEXTENSIONS_DIRS
583 583
584 584 websocket_url = Unicode("", config=True,
585 585 help="""The base URL for websockets,
586 586 if it differs from the HTTP server (hint: it almost certainly doesn't).
587 587
588 588 Should be in the form of an HTTP origin: ws[s]://hostname[:port]
589 589 """
590 590 )
591 591 mathjax_url = Unicode("", config=True,
592 592 help="""The url for MathJax.js."""
593 593 )
594 594 def _mathjax_url_default(self):
595 595 if not self.enable_mathjax:
596 596 return u''
597 597 static_url_prefix = self.tornado_settings.get("static_url_prefix",
598 598 url_path_join(self.base_url, "static")
599 599 )
600 600
601 601 # try local mathjax, either in nbextensions/mathjax or static/mathjax
602 602 for (url_prefix, search_path) in [
603 603 (url_path_join(self.base_url, "nbextensions"), self.nbextensions_path),
604 604 (static_url_prefix, self.static_file_path),
605 605 ]:
606 606 self.log.debug("searching for local mathjax in %s", search_path)
607 607 try:
608 608 mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), search_path)
609 609 except IOError:
610 610 continue
611 611 else:
612 612 url = url_path_join(url_prefix, u"mathjax/MathJax.js")
613 613 self.log.info("Serving local MathJax from %s at %s", mathjax, url)
614 614 return url
615 615
616 616 # no local mathjax, serve from CDN
617 617 url = u"https://cdn.mathjax.org/mathjax/latest/MathJax.js"
618 618 self.log.info("Using MathJax from CDN: %s", url)
619 619 return url
620 620
621 621 def _mathjax_url_changed(self, name, old, new):
622 622 if new and not self.enable_mathjax:
623 623 # enable_mathjax=False overrides mathjax_url
624 624 self.mathjax_url = u''
625 625 else:
626 626 self.log.info("Using MathJax: %s", new)
627 627
628 628 contents_manager_class = Type(
629 629 default_value=FileContentsManager,
630 630 klass=ContentsManager,
631 631 config=True,
632 632 help='The notebook manager class to use.'
633 633 )
634 634 kernel_manager_class = Type(
635 635 default_value=MappingKernelManager,
636 636 config=True,
637 637 help='The kernel manager class to use.'
638 638 )
639 639 session_manager_class = Type(
640 640 default_value=SessionManager,
641 641 config=True,
642 642 help='The session manager class to use.'
643 643 )
644 644 cluster_manager_class = Type(
645 645 default_value=ClusterManager,
646 646 config=True,
647 647 help='The cluster manager class to use.'
648 648 )
649 649
650 650 config_manager_class = Type(
651 651 default_value=ConfigManager,
652 652 config = True,
653 653 help='The config manager class to use'
654 654 )
655 655
656 656 kernel_spec_manager = Instance(KernelSpecManager)
657 657
658 658 kernel_spec_manager_class = Type(
659 659 default_value=KernelSpecManager,
660 660 config=True,
661 661 help="""
662 662 The kernel spec manager class to use. Should be a subclass
663 663 of `IPython.kernel.kernelspec.KernelSpecManager`.
664 664
665 665 The API of KernelSpecManager is provisional and might change
666 666 without warning between this version of IPython and the next stable one.
667 667 """
668 668 )
669 669
670 670 login_handler_class = Type(
671 671 default_value=LoginHandler,
672 672 klass=web.RequestHandler,
673 673 config=True,
674 674 help='The login handler class to use.',
675 675 )
676 676
677 677 logout_handler_class = Type(
678 678 default_value=LogoutHandler,
679 679 klass=web.RequestHandler,
680 680 config=True,
681 681 help='The logout handler class to use.',
682 682 )
683 683
684 684 trust_xheaders = Bool(False, config=True,
685 685 help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers"
686 686 "sent by the upstream reverse proxy. Necessary if the proxy handles SSL")
687 687 )
688 688
689 689 info_file = Unicode()
690 690
691 691 def _info_file_default(self):
692 692 info_file = "nbserver-%s.json"%os.getpid()
693 693 return os.path.join(self.profile_dir.security_dir, info_file)
694 694
695 695 pylab = Unicode('disabled', config=True,
696 696 help="""
697 697 DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
698 698 """
699 699 )
700 700 def _pylab_changed(self, name, old, new):
701 701 """when --pylab is specified, display a warning and exit"""
702 702 if new != 'warn':
703 703 backend = ' %s' % new
704 704 else:
705 705 backend = ''
706 706 self.log.error("Support for specifying --pylab on the command line has been removed.")
707 707 self.log.error(
708 708 "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend)
709 709 )
710 710 self.exit(1)
711 711
712 712 notebook_dir = Unicode(config=True,
713 713 help="The directory to use for notebooks and kernels."
714 714 )
715 715
716 716 def _notebook_dir_default(self):
717 717 if self.file_to_run:
718 718 return os.path.dirname(os.path.abspath(self.file_to_run))
719 719 else:
720 720 return py3compat.getcwd()
721 721
722 722 def _notebook_dir_changed(self, name, old, new):
723 723 """Do a bit of validation of the notebook dir."""
724 724 if not os.path.isabs(new):
725 725 # If we receive a non-absolute path, make it absolute.
726 726 self.notebook_dir = os.path.abspath(new)
727 727 return
728 728 if not os.path.isdir(new):
729 729 raise TraitError("No such notebook dir: %r" % new)
730 730
731 731 # setting App.notebook_dir implies setting notebook and kernel dirs as well
732 732 self.config.FileContentsManager.root_dir = new
733 733 self.config.MappingKernelManager.root_dir = new
734 734
735 735 server_extensions = List(Unicode(), config=True,
736 736 help=("Python modules to load as notebook server extensions. "
737 737 "This is an experimental API, and may change in future releases.")
738 738 )
739 739
740 740 def parse_command_line(self, argv=None):
741 741 super(NotebookApp, self).parse_command_line(argv)
742 742
743 743 if self.extra_args:
744 744 arg0 = self.extra_args[0]
745 745 f = os.path.abspath(arg0)
746 746 self.argv.remove(arg0)
747 747 if not os.path.exists(f):
748 748 self.log.critical("No such file or directory: %s", f)
749 749 self.exit(1)
750 750
751 751 # Use config here, to ensure that it takes higher priority than
752 752 # anything that comes from the profile.
753 753 c = Config()
754 754 if os.path.isdir(f):
755 755 c.NotebookApp.notebook_dir = f
756 756 elif os.path.isfile(f):
757 757 c.NotebookApp.file_to_run = f
758 758 self.update_config(c)
759 759
760 760 def init_kernel_argv(self):
761 761 """add the profile-dir to arguments to be passed to IPython kernels"""
762 762 # FIXME: remove special treatment of IPython kernels
763 763 # Kernel should get *absolute* path to profile directory
764 764 self.ipython_kernel_argv = ["--profile-dir", self.profile_dir.location]
765 765
766 766 def init_configurables(self):
767 # force Session default to be secure
768 default_secure(self.config)
769
770 767 self.kernel_spec_manager = self.kernel_spec_manager_class(
771 768 parent=self,
772 769 ipython_dir=self.ipython_dir,
773 770 )
774 771 self.kernel_manager = self.kernel_manager_class(
775 772 parent=self,
776 773 log=self.log,
777 774 ipython_kernel_argv=self.ipython_kernel_argv,
778 775 connection_dir=self.profile_dir.security_dir,
779 776 )
780 777 self.contents_manager = self.contents_manager_class(
781 778 parent=self,
782 779 log=self.log,
783 780 )
784 781 self.session_manager = self.session_manager_class(
785 782 parent=self,
786 783 log=self.log,
787 784 kernel_manager=self.kernel_manager,
788 785 contents_manager=self.contents_manager,
789 786 )
790 787 self.cluster_manager = self.cluster_manager_class(
791 788 parent=self,
792 789 log=self.log,
793 790 )
794 791
795 792 self.config_manager = self.config_manager_class(
796 793 parent=self,
797 794 log=self.log,
798 795 profile_dir=self.profile_dir.location,
799 796 )
800 797
801 798 def init_logging(self):
802 799 # This prevents double log messages because tornado uses a root logger that
803 800 # self.log is a child of. The logging module dispatches log messages to a log
804 801 # and all of its ancestors until propagate is set to False.
805 802 self.log.propagate = False
806 803
807 804 for log in app_log, access_log, gen_log:
808 805 # consistent log output name (NotebookApp instead of tornado.access, etc.)
809 806 log.name = self.log.name
810 807 # hook up tornado 3's loggers to our app handlers
811 808 logger = logging.getLogger('tornado')
812 809 logger.propagate = True
813 810 logger.parent = self.log
814 811 logger.setLevel(self.log.level)
815 812
816 813 def init_webapp(self):
817 814 """initialize tornado webapp and httpserver"""
818 815 self.tornado_settings['allow_origin'] = self.allow_origin
819 816 if self.allow_origin_pat:
820 817 self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat)
821 818 self.tornado_settings['allow_credentials'] = self.allow_credentials
822 819 # ensure default_url starts with base_url
823 820 if not self.default_url.startswith(self.base_url):
824 821 self.default_url = url_path_join(self.base_url, self.default_url)
825 822
826 823 self.web_app = NotebookWebApplication(
827 824 self, self.kernel_manager, self.contents_manager,
828 825 self.cluster_manager, self.session_manager, self.kernel_spec_manager,
829 826 self.config_manager,
830 827 self.log, self.base_url, self.default_url, self.tornado_settings,
831 828 self.jinja_environment_options
832 829 )
833 830 ssl_options = self.ssl_options
834 831 if self.certfile:
835 832 ssl_options['certfile'] = self.certfile
836 833 if self.keyfile:
837 834 ssl_options['keyfile'] = self.keyfile
838 835 if not ssl_options:
839 836 # None indicates no SSL config
840 837 ssl_options = None
841 838 self.login_handler_class.validate_security(self, ssl_options=ssl_options)
842 839 self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options,
843 840 xheaders=self.trust_xheaders)
844 841
845 842 success = None
846 843 for port in random_ports(self.port, self.port_retries+1):
847 844 try:
848 845 self.http_server.listen(port, self.ip)
849 846 except socket.error as e:
850 847 if e.errno == errno.EADDRINUSE:
851 848 self.log.info('The port %i is already in use, trying another random port.' % port)
852 849 continue
853 850 elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)):
854 851 self.log.warn("Permission to listen on port %i denied" % port)
855 852 continue
856 853 else:
857 854 raise
858 855 else:
859 856 self.port = port
860 857 success = True
861 858 break
862 859 if not success:
863 860 self.log.critical('ERROR: the notebook server could not be started because '
864 861 'no available port could be found.')
865 862 self.exit(1)
866 863
867 864 @property
868 865 def display_url(self):
869 866 ip = self.ip if self.ip else '[all ip addresses on your system]'
870 867 return self._url(ip)
871 868
872 869 @property
873 870 def connection_url(self):
874 871 ip = self.ip if self.ip else 'localhost'
875 872 return self._url(ip)
876 873
877 874 def _url(self, ip):
878 875 proto = 'https' if self.certfile else 'http'
879 876 return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url)
880 877
881 878 def init_terminals(self):
882 879 try:
883 880 from .terminal import initialize
884 881 initialize(self.web_app)
885 882 self.web_app.settings['terminals_available'] = True
886 883 except ImportError as e:
887 884 log = self.log.debug if sys.platform == 'win32' else self.log.warn
888 885 log("Terminals not available (error was %s)", e)
889 886
890 887 def init_signal(self):
891 888 if not sys.platform.startswith('win'):
892 889 signal.signal(signal.SIGINT, self._handle_sigint)
893 890 signal.signal(signal.SIGTERM, self._signal_stop)
894 891 if hasattr(signal, 'SIGUSR1'):
895 892 # Windows doesn't support SIGUSR1
896 893 signal.signal(signal.SIGUSR1, self._signal_info)
897 894 if hasattr(signal, 'SIGINFO'):
898 895 # only on BSD-based systems
899 896 signal.signal(signal.SIGINFO, self._signal_info)
900 897
901 898 def _handle_sigint(self, sig, frame):
902 899 """SIGINT handler spawns confirmation dialog"""
903 900 # register more forceful signal handler for ^C^C case
904 901 signal.signal(signal.SIGINT, self._signal_stop)
905 902 # request confirmation dialog in bg thread, to avoid
906 903 # blocking the App
907 904 thread = threading.Thread(target=self._confirm_exit)
908 905 thread.daemon = True
909 906 thread.start()
910 907
911 908 def _restore_sigint_handler(self):
912 909 """callback for restoring original SIGINT handler"""
913 910 signal.signal(signal.SIGINT, self._handle_sigint)
914 911
915 912 def _confirm_exit(self):
916 913 """confirm shutdown on ^C
917 914
918 915 A second ^C, or answering 'y' within 5s will cause shutdown,
919 916 otherwise the original SIGINT handler will be restored.
920 917
921 918 This doesn't work on Windows.
922 919 """
923 920 info = self.log.info
924 921 info('interrupted')
925 922 print(self.notebook_info())
926 923 sys.stdout.write("Shutdown this notebook server (y/[n])? ")
927 924 sys.stdout.flush()
928 925 r,w,x = select.select([sys.stdin], [], [], 5)
929 926 if r:
930 927 line = sys.stdin.readline()
931 928 if line.lower().startswith('y') and 'n' not in line.lower():
932 929 self.log.critical("Shutdown confirmed")
933 930 ioloop.IOLoop.current().stop()
934 931 return
935 932 else:
936 933 print("No answer for 5s:", end=' ')
937 934 print("resuming operation...")
938 935 # no answer, or answer is no:
939 936 # set it back to original SIGINT handler
940 937 # use IOLoop.add_callback because signal.signal must be called
941 938 # from main thread
942 939 ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)
943 940
944 941 def _signal_stop(self, sig, frame):
945 942 self.log.critical("received signal %s, stopping", sig)
946 943 ioloop.IOLoop.current().stop()
947 944
948 945 def _signal_info(self, sig, frame):
949 946 print(self.notebook_info())
950 947
951 948 def init_components(self):
952 949 """Check the components submodule, and warn if it's unclean"""
953 950 status = submodule.check_submodule_status()
954 951 if status == 'missing':
955 952 self.log.warn("components submodule missing, running `git submodule update`")
956 953 submodule.update_submodules(submodule.ipython_parent())
957 954 elif status == 'unclean':
958 955 self.log.warn("components submodule unclean, you may see 404s on static/components")
959 956 self.log.warn("run `setup.py submodule` or `git submodule update` to update")
960 957
961 958 def init_server_extensions(self):
962 959 """Load any extensions specified by config.
963 960
964 961 Import the module, then call the load_jupyter_server_extension function,
965 962 if one exists.
966 963
967 964 The extension API is experimental, and may change in future releases.
968 965 """
969 966 for modulename in self.server_extensions:
970 967 try:
971 968 mod = importlib.import_module(modulename)
972 969 func = getattr(mod, 'load_jupyter_server_extension', None)
973 970 if func is not None:
974 971 func(self)
975 972 except Exception:
976 973 self.log.warn("Error loading server extension %s", modulename,
977 974 exc_info=True)
978 975
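Per the loading logic above, a server extension is just an importable module exposing `load_jupyter_server_extension`. A minimal hypothetical extension (`myext` is a made-up module name):

    # myext.py, importable from sys.path
    def load_jupyter_server_extension(nbapp):
        # nbapp is the running NotebookApp instance
        nbapp.log.info("myext server extension loaded")

    # enabled via config:
    # c.NotebookApp.server_extensions = ['myext']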
979 976 @catch_config_error
980 977 def initialize(self, argv=None):
981 978 super(NotebookApp, self).initialize(argv)
982 979 self.init_logging()
983 980 self.init_kernel_argv()
984 981 self.init_configurables()
985 982 self.init_components()
986 983 self.init_webapp()
987 984 self.init_terminals()
988 985 self.init_signal()
989 986 self.init_server_extensions()
990 987
991 988 def cleanup_kernels(self):
992 989 """Shutdown all kernels.
993 990
994 991 The kernels will shut down themselves when this process no longer exists,
995 992 but explicit shutdown allows the KernelManagers to clean up the connection files.
996 993 """
997 994 self.log.info('Shutting down kernels')
998 995 self.kernel_manager.shutdown_all()
999 996
1000 997 def notebook_info(self):
1001 998 "Return the current working directory and the server url information"
1002 999 info = self.contents_manager.info_string() + "\n"
1003 1000 info += "%d active kernels \n" % len(self.kernel_manager._kernels)
1004 1001 return info + "The IPython Notebook is running at: %s" % self.display_url
1005 1002
1006 1003 def server_info(self):
1007 1004 """Return a JSONable dict of information about this server."""
1008 1005 return {'url': self.connection_url,
1009 1006 'hostname': self.ip if self.ip else 'localhost',
1010 1007 'port': self.port,
1011 1008 'secure': bool(self.certfile),
1012 1009 'base_url': self.base_url,
1013 1010 'notebook_dir': os.path.abspath(self.notebook_dir),
1014 1011 'pid': os.getpid()
1015 1012 }
1016 1013
1017 1014 def write_server_info_file(self):
1018 1015 """Write the result of server_info() to the JSON file info_file."""
1019 1016 with open(self.info_file, 'w') as f:
1020 1017 json.dump(self.server_info(), f, indent=2)
1021 1018
1022 1019 def remove_server_info_file(self):
1023 1020 """Remove the nbserver-<pid>.json file created for this server.
1024 1021
1025 1022 Ignores the error raised when the file has already been removed.
1026 1023 """
1027 1024 try:
1028 1025 os.unlink(self.info_file)
1029 1026 except OSError as e:
1030 1027 if e.errno != errno.ENOENT:
1031 1028 raise
1032 1029
1033 1030 def start(self):
1034 1031 """ Start the IPython Notebook server app, after initialization
1035 1032
1036 1033 This method takes no arguments so all configuration and initialization
1037 1034 must be done prior to calling this method."""
1038 1035 if self.subapp is not None:
1039 1036 return self.subapp.start()
1040 1037
1041 1038 info = self.log.info
1042 1039 for line in self.notebook_info().split("\n"):
1043 1040 info(line)
1044 1041 info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).")
1045 1042
1046 1043 self.write_server_info_file()
1047 1044
1048 1045 if self.open_browser or self.file_to_run:
1049 1046 try:
1050 1047 browser = webbrowser.get(self.browser or None)
1051 1048 except webbrowser.Error as e:
1052 1049 self.log.warn('No web browser found: %s.' % e)
1053 1050 browser = None
1054 1051
1055 1052 if self.file_to_run:
1056 1053 if not os.path.exists(self.file_to_run):
1057 1054 self.log.critical("%s does not exist" % self.file_to_run)
1058 1055 self.exit(1)
1059 1056
1060 1057 relpath = os.path.relpath(self.file_to_run, self.notebook_dir)
1061 1058 uri = url_path_join('notebooks', *relpath.split(os.sep))
1062 1059 else:
1063 1060 uri = 'tree'
1064 1061 if browser:
1065 1062 b = lambda : browser.open(url_path_join(self.connection_url, uri),
1066 1063 new=2)
1067 1064 threading.Thread(target=b).start()
1068 1065
1069 1066 self.io_loop = ioloop.IOLoop.current()
1070 1067 if sys.platform.startswith('win'):
1071 1068 # add no-op to wake every 5s
1072 1069 # to handle signals that may be ignored by the inner loop
1073 1070 pc = ioloop.PeriodicCallback(lambda : None, 5000)
1074 1071 pc.start()
1075 1072 try:
1076 1073 self.io_loop.start()
1077 1074 except KeyboardInterrupt:
1078 1075 info("Interrupted...")
1079 1076 finally:
1080 1077 self.cleanup_kernels()
1081 1078 self.remove_server_info_file()
1082 1079
1083 1080 def stop(self):
1084 1081 def _stop():
1085 1082 self.http_server.stop()
1086 1083 self.io_loop.stop()
1087 1084 self.io_loop.add_callback(_stop)
1088 1085
1089 1086
1090 1087 def list_running_servers(profile='default'):
1091 1088 """Iterate over the server info files of running notebook servers.
1092 1089
1093 1090 Given a profile name, find nbserver-* files in the security directory of
1094 1091 that profile, and yield dicts of their information, each one pertaining to
1095 1092 a currently running notebook server instance.
1096 1093 """
1097 1094 pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), name=profile)
1098 1095 for file in os.listdir(pd.security_dir):
1099 1096 if file.startswith('nbserver-'):
1100 1097 with io.open(os.path.join(pd.security_dir, file), encoding='utf-8') as f:
1101 1098 info = json.load(f)
1102 1099
1103 1100 # Simple check whether that process is really still running
1104 1101 # Also remove leftover files from IPython 2.x without a pid field
1105 1102 if ('pid' in info) and check_pid(info['pid']):
1106 1103 yield info
1107 1104 else:
1108 1105 # If the process has died, try to delete its info file
1109 1106 try:
1110 1107 os.unlink(file)
1111 1108 except OSError:
1112 1109 pass # TODO: This should warn or log or something
1113 1110 #-----------------------------------------------------------------------------
1114 1111 # Main entry point
1115 1112 #-----------------------------------------------------------------------------
1116 1113
1117 1114 launch_new_instance = NotebookApp.launch_instance
1118 1115
@@ -1,286 +1,288 @@
1 1 """Tornado handlers for kernels."""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import json
7 7 import logging
8 8 from tornado import gen, web
9 9 from tornado.concurrent import Future
10 10 from tornado.ioloop import IOLoop
11 11
12 12 from IPython.utils.jsonutil import date_default
13 13 from IPython.utils.py3compat import cast_unicode
14 14 from IPython.html.utils import url_path_join, url_escape
15 15
16 16 from ...base.handlers import IPythonHandler, json_errors
17 17 from ...base.zmqhandlers import AuthenticatedZMQStreamHandler, deserialize_binary_message
18 18
19 19 from IPython.core.release import kernel_protocol_version
20 20
21 21 class MainKernelHandler(IPythonHandler):
22 22
23 23 @web.authenticated
24 24 @json_errors
25 25 def get(self):
26 26 km = self.kernel_manager
27 27 self.finish(json.dumps(km.list_kernels()))
28 28
29 29 @web.authenticated
30 30 @json_errors
31 31 def post(self):
32 32 km = self.kernel_manager
33 33 model = self.get_json_body()
34 34 if model is None:
35 35 model = {
36 36 'name': km.default_kernel_name
37 37 }
38 38 else:
39 39 model.setdefault('name', km.default_kernel_name)
40 40
41 41 kernel_id = km.start_kernel(kernel_name=model['name'])
42 42 model = km.kernel_model(kernel_id)
43 43 location = url_path_join(self.base_url, 'api', 'kernels', kernel_id)
44 44 self.set_header('Location', url_escape(location))
45 45 self.set_status(201)
46 46 self.finish(json.dumps(model))
47 47
48 48
49 49 class KernelHandler(IPythonHandler):
50 50
51 51 SUPPORTED_METHODS = ('DELETE', 'GET')
52 52
53 53 @web.authenticated
54 54 @json_errors
55 55 def get(self, kernel_id):
56 56 km = self.kernel_manager
57 57 km._check_kernel_id(kernel_id)
58 58 model = km.kernel_model(kernel_id)
59 59 self.finish(json.dumps(model))
60 60
61 61 @web.authenticated
62 62 @json_errors
63 63 def delete(self, kernel_id):
64 64 km = self.kernel_manager
65 65 km.shutdown_kernel(kernel_id)
66 66 self.set_status(204)
67 67 self.finish()
68 68
69 69
70 70 class KernelActionHandler(IPythonHandler):
71 71
72 72 @web.authenticated
73 73 @json_errors
74 74 def post(self, kernel_id, action):
75 75 km = self.kernel_manager
76 76 if action == 'interrupt':
77 77 km.interrupt_kernel(kernel_id)
78 78 self.set_status(204)
79 79 if action == 'restart':
80 80 km.restart_kernel(kernel_id)
81 81 model = km.kernel_model(kernel_id)
82 82 self.set_header('Location', '{0}api/kernels/{1}'.format(self.base_url, kernel_id))
83 83 self.write(json.dumps(model))
84 84 self.finish()
85 85
86 86
87 87 class ZMQChannelsHandler(AuthenticatedZMQStreamHandler):
88 88
89 89 @property
90 90 def kernel_info_timeout(self):
91 91 return self.settings.get('kernel_info_timeout', 10)
92 92
93 93 def __repr__(self):
94 94 return "%s(%s)" % (self.__class__.__name__, getattr(self, 'kernel_id', 'uninitialized'))
95 95
96 96 def create_stream(self):
97 97 km = self.kernel_manager
98 98 identity = self.session.bsession
99 99 for channel in ('shell', 'iopub', 'stdin'):
100 100 meth = getattr(km, 'connect_' + channel)
101 101 self.channels[channel] = stream = meth(self.kernel_id, identity=identity)
102 102 stream.channel = channel
103 103 km.add_restart_callback(self.kernel_id, self.on_kernel_restarted)
104 104 km.add_restart_callback(self.kernel_id, self.on_restart_failed, 'dead')
105 105
106 106 def request_kernel_info(self):
107 107 """send a request for kernel_info"""
108 108 km = self.kernel_manager
109 109 kernel = km.get_kernel(self.kernel_id)
110 110 try:
111 111 # check for previous request
112 112 future = kernel._kernel_info_future
113 113 except AttributeError:
114 114 self.log.debug("Requesting kernel info from %s", self.kernel_id)
115 115 # Create a kernel_info channel to query the kernel protocol version.
116 116 # This channel will be closed after the kernel_info reply is received.
117 117 if self.kernel_info_channel is None:
118 118 self.kernel_info_channel = km.connect_shell(self.kernel_id)
119 119 self.kernel_info_channel.on_recv(self._handle_kernel_info_reply)
120 120 self.session.send(self.kernel_info_channel, "kernel_info_request")
121 121 # store the future on the kernel, so only one request is sent
122 122 kernel._kernel_info_future = self._kernel_info_future
123 123 else:
124 124 if not future.done():
125 125 self.log.debug("Waiting for pending kernel_info request")
126 126 future.add_done_callback(lambda f: self._finish_kernel_info(f.result()))
127 127 return self._kernel_info_future
128 128
129 129 def _handle_kernel_info_reply(self, msg):
130 130 """process the kernel_info_reply
131 131
132 132 enabling msg spec adaptation, if necessary
133 133 """
134 134 idents,msg = self.session.feed_identities(msg)
135 135 try:
136 136 msg = self.session.deserialize(msg)
137 137 except:
138 138 self.log.error("Bad kernel_info reply", exc_info=True)
139 139 self._kernel_info_future.set_result({})
140 140 return
141 141 else:
142 142 info = msg['content']
143 143 self.log.debug("Received kernel info: %s", info)
144 144 if msg['msg_type'] != 'kernel_info_reply' or 'protocol_version' not in info:
145 145 self.log.error("Kernel info request failed, assuming current protocol version: %s", info)
146 146 info = {}
147 147 self._finish_kernel_info(info)
148 148
149 149 # close the kernel_info channel, we don't need it anymore
150 150 if self.kernel_info_channel:
151 151 self.kernel_info_channel.close()
152 152 self.kernel_info_channel = None
153 153
154 154 def _finish_kernel_info(self, info):
155 155 """Finish handling kernel_info reply
156 156
157 157 Set up protocol adaptation, if needed,
158 158 and signal that connection can continue.
159 159 """
160 160 protocol_version = info.get('protocol_version', kernel_protocol_version)
161 161 if protocol_version != kernel_protocol_version:
162 162 self.session.adapt_version = int(protocol_version.split('.')[0])
163 163 self.log.info("Adapting to protocol v%s for kernel %s", protocol_version, self.kernel_id)
164 164 if not self._kernel_info_future.done():
165 165 self._kernel_info_future.set_result(info)
166 166
167 167 def initialize(self):
168 168 super(ZMQChannelsHandler, self).initialize()
169 169 self.zmq_stream = None
170 170 self.channels = {}
171 171 self.kernel_id = None
172 172 self.kernel_info_channel = None
173 173 self._kernel_info_future = Future()
174 174
175 175 @gen.coroutine
176 176 def pre_get(self):
177 177 # authenticate first
178 178 super(ZMQChannelsHandler, self).pre_get()
179 179 # then request kernel info, waiting up to a certain time before giving up.
180 180 # We don't want to wait forever, because browsers don't take it well when
181 181 # servers never respond to websocket connection requests.
182 kernel = self.kernel_manager.get_kernel(self.kernel_id)
183 self.session.key = kernel.session.key
182 184 future = self.request_kernel_info()
183 185
184 186 def give_up():
185 187 """Don't wait forever for the kernel to reply"""
186 188 if future.done():
187 189 return
188 190 self.log.warn("Timeout waiting for kernel_info reply from %s", self.kernel_id)
189 191 future.set_result({})
190 192 loop = IOLoop.current()
191 193 loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up)
192 194 # actually wait for it
193 195 yield future
194 196
195 197 @gen.coroutine
196 198 def get(self, kernel_id):
197 199 self.kernel_id = cast_unicode(kernel_id, 'ascii')
198 200 yield super(ZMQChannelsHandler, self).get(kernel_id=kernel_id)
199 201
200 202 def open(self, kernel_id):
201 203 super(ZMQChannelsHandler, self).open()
202 204 try:
203 205 self.create_stream()
204 206 except web.HTTPError as e:
205 207 self.log.error("Error opening stream: %s", e)
206 208 # WebSockets don't respond to traditional error codes so we
207 209 # close the connection.
208 210 for channel, stream in self.channels.items():
209 211 if not stream.closed():
210 212 stream.close()
211 213 self.close()
212 214 else:
213 215 for channel, stream in self.channels.items():
214 216 stream.on_recv_stream(self._on_zmq_reply)
215 217
216 218 def on_message(self, msg):
217 219 if not self.channels:
218 220 # already closed, ignore the message
219 221 self.log.debug("Received message on closed websocket %r", msg)
220 222 return
221 223 if isinstance(msg, bytes):
222 224 msg = deserialize_binary_message(msg)
223 225 else:
224 226 msg = json.loads(msg)
225 227 channel = msg.pop('channel', None)
226 228 if channel is None:
227 229 self.log.warn("No channel specified, assuming shell: %s", msg)
228 230 channel = 'shell'
229 231 if channel not in self.channels:
230 232 self.log.warn("No such channel: %r", channel)
231 233 return
232 234 stream = self.channels[channel]
233 235 self.session.send(stream, msg)
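
For reference, each frame a client sends over this websocket is a JSON object carrying a 'channel' key next to the usual message-spec fields; a sketch of the shape on_message expects (all IDs illustrative):

    frame = {
        "channel": "shell",             # routed to self.channels['shell']
        "header": {
            "msg_id": "abc123",         # illustrative
            "username": "user",
            "session": "sess-1",
            "msg_type": "execute_request",
            "version": "5.0",
        },
        "parent_header": {},
        "metadata": {},
        "content": {"code": "1 + 1", "silent": False},
    }

If 'channel' is omitted, the handler logs a warning and assumes 'shell', as the code above shows.
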
234 236
235 237 def on_close(self):
236 238 km = self.kernel_manager
237 239 if self.kernel_id in km:
238 240 km.remove_restart_callback(
239 241 self.kernel_id, self.on_kernel_restarted,
240 242 )
241 243 km.remove_restart_callback(
242 244 self.kernel_id, self.on_restart_failed, 'dead',
243 245 )
244 246 # This method can be called twice, once by self.kernel_died and once
245 247 # from the WebSocket close event. If the WebSocket connection is
246 248 # closed before the ZMQ streams are setup, they could be None.
247 249 for channel, stream in self.channels.items():
248 250 if stream is not None and not stream.closed():
249 251 stream.on_recv(None)
250 252 # close the socket directly, don't wait for the stream
251 253 socket = stream.socket
252 254 stream.close()
253 255 socket.close()
254 256
255 257 self.channels = {}
256 258
257 259 def _send_status_message(self, status):
258 260 msg = self.session.msg("status",
259 261 {'execution_state': status}
260 262 )
261 263 msg['channel'] = 'iopub'
262 264 self.write_message(json.dumps(msg, default=date_default))
263 265
264 266 def on_kernel_restarted(self):
265 267 logging.warn("kernel %s restarted", self.kernel_id)
266 268 self._send_status_message('restarting')
267 269
268 270 def on_restart_failed(self):
269 271 logging.error("kernel %s restart failed!", self.kernel_id)
270 272 self._send_status_message('dead')
271 273
272 274
273 275 #-----------------------------------------------------------------------------
274 276 # URL to handler mappings
275 277 #-----------------------------------------------------------------------------
276 278
277 279
278 280 _kernel_id_regex = r"(?P<kernel_id>\w+-\w+-\w+-\w+-\w+)"
279 281 _kernel_action_regex = r"(?P<action>restart|interrupt)"
280 282
281 283 default_handlers = [
282 284 (r"/api/kernels", MainKernelHandler),
283 285 (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler),
284 286 (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler),
285 287 (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler),
286 288 ]
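
Taken together, these mappings are the kernels REST API. A minimal client sketch (assuming a server at localhost:8888 with authentication disabled, and the third-party requests library):

    import requests

    base = 'http://localhost:8888/api/kernels'   # assumed server address

    # start a kernel with the default kernelspec (MainKernelHandler.post)
    kernel = requests.post(base).json()
    kid = kernel['id']

    # interrupt, then restart it (KernelActionHandler.post)
    requests.post('%s/%s/interrupt' % (base, kid))
    requests.post('%s/%s/restart' % (base, kid))

    # shut it down (KernelHandler.delete)
    requests.delete('%s/%s' % (base, kid))
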
@@ -1,172 +1,172 b''
1 1 """An in-process kernel"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from contextlib import contextmanager
7 7 import logging
8 8 import sys
9 9
10 10 from IPython.core.interactiveshell import InteractiveShellABC
11 11 from IPython.utils.jsonutil import json_clean
12 12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
13 13 from IPython.kernel.zmq.ipkernel import IPythonKernel
14 14 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
15 15
16 16 from .socket import DummySocket
17 17
18 18 #-----------------------------------------------------------------------------
19 19 # Main kernel class
20 20 #-----------------------------------------------------------------------------
21 21
22 22 class InProcessKernel(IPythonKernel):
23 23
24 24 #-------------------------------------------------------------------------
25 25 # InProcessKernel interface
26 26 #-------------------------------------------------------------------------
27 27
28 28 # The frontends connected to this kernel.
29 29 frontends = List(
30 30 Instance('IPython.kernel.inprocess.client.InProcessKernelClient')
31 31 )
32 32
33 33 # The GUI environment that the kernel is running under. This need not be
34 34 # specified for the normal operation for the kernel, but is required for
35 35 # IPython's GUI support (including pylab). The default is 'inline' because
36 36 # it is safe under all GUI toolkits.
37 37 gui = Enum(('tk', 'gtk', 'wx', 'qt', 'qt4', 'inline'),
38 38 default_value='inline')
39 39
40 40 raw_input_str = Any()
41 41 stdout = Any()
42 42 stderr = Any()
43 43
44 44 #-------------------------------------------------------------------------
45 45 # Kernel interface
46 46 #-------------------------------------------------------------------------
47 47
48 48 shell_class = Type()
49 49 shell_streams = List()
50 50 control_stream = Any()
51 51 iopub_socket = Instance(DummySocket, ())
52 52 stdin_socket = Instance(DummySocket, ())
53 53
54 54 def __init__(self, **traits):
55 55 # When an InteractiveShell is instantiated by our base class, it binds
56 56 # the current values of sys.stdout and sys.stderr.
57 57 with self._redirected_io():
58 58 super(InProcessKernel, self).__init__(**traits)
59 59
60 60 self.iopub_socket.on_trait_change(self._io_dispatch, 'message_sent')
61 61 self.shell.kernel = self
62 62
63 63 def execute_request(self, stream, ident, parent):
64 64 """ Override for temporary IO redirection. """
65 65 with self._redirected_io():
66 66 super(InProcessKernel, self).execute_request(stream, ident, parent)
67 67
68 68 def start(self):
69 69 """ Override registration of dispatchers for streams. """
70 70 self.shell.exit_now = False
71 71
72 72 def _abort_queue(self, stream):
73 73 """ The in-process kernel doesn't abort requests. """
74 74 pass
75 75
76 76 def _input_request(self, prompt, ident, parent, password=False):
77 77 # Flush output before making the request.
78 78 self.raw_input_str = None
79 79 sys.stderr.flush()
80 80 sys.stdout.flush()
81 81
82 82 # Send the input request.
83 83 content = json_clean(dict(prompt=prompt, password=password))
84 84 msg = self.session.msg(u'input_request', content, parent)
85 85 for frontend in self.frontends:
86 86 if frontend.session.session == parent['header']['session']:
87 87 frontend.stdin_channel.call_handlers(msg)
88 88 break
89 89 else:
90 90 logging.error('No frontend found for raw_input request')
91 91 return str()
92 92
93 93 # Await a response.
94 94 while self.raw_input_str is None:
95 95 frontend.stdin_channel.process_events()
96 96 return self.raw_input_str
97 97
98 98 #-------------------------------------------------------------------------
99 99 # Protected interface
100 100 #-------------------------------------------------------------------------
101 101
102 102 @contextmanager
103 103 def _redirected_io(self):
104 104 """ Temporarily redirect IO to the kernel.
105 105 """
106 106 sys_stdout, sys_stderr = sys.stdout, sys.stderr
107 107 sys.stdout, sys.stderr = self.stdout, self.stderr
108 108 yield
109 109 sys.stdout, sys.stderr = sys_stdout, sys_stderr
110 110
111 111 #------ Trait change handlers --------------------------------------------
112 112
113 113 def _io_dispatch(self):
114 114 """ Called when a message is sent to the IO socket.
115 115 """
116 116 ident, msg = self.session.recv(self.iopub_socket, copy=False)
117 117 for frontend in self.frontends:
118 118 frontend.iopub_channel.call_handlers(msg)
119 119
120 120 #------ Trait initializers -----------------------------------------------
121 121
122 122 def _log_default(self):
123 123 return logging.getLogger(__name__)
124 124
125 125 def _session_default(self):
126 126 from IPython.kernel.zmq.session import Session
127 return Session(parent=self)
127 return Session(parent=self, key=b'')
128 128
129 129 def _shell_class_default(self):
130 130 return InProcessInteractiveShell
131 131
132 132 def _stdout_default(self):
133 133 from IPython.kernel.zmq.iostream import OutStream
134 134 return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False)
135 135
136 136 def _stderr_default(self):
137 137 from IPython.kernel.zmq.iostream import OutStream
138 138 return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False)
139 139
140 140 #-----------------------------------------------------------------------------
141 141 # Interactive shell subclass
142 142 #-----------------------------------------------------------------------------
143 143
144 144 class InProcessInteractiveShell(ZMQInteractiveShell):
145 145
146 146 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel')
147 147
148 148 #-------------------------------------------------------------------------
149 149 # InteractiveShell interface
150 150 #-------------------------------------------------------------------------
151 151
152 152 def enable_gui(self, gui=None):
153 153 """Enable GUI integration for the kernel."""
154 154 from IPython.kernel.zmq.eventloops import enable_gui
155 155 if not gui:
156 156 gui = self.kernel.gui
157 157 return enable_gui(gui, kernel=self.kernel)
158 158
159 159 def enable_matplotlib(self, gui=None):
160 160 """Enable matplotlib integration for the kernel."""
161 161 if not gui:
162 162 gui = self.kernel.gui
163 163 return super(InProcessInteractiveShell, self).enable_matplotlib(gui)
164 164
165 165 def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
166 166 """Activate pylab support at runtime."""
167 167 if not gui:
168 168 gui = self.kernel.gui
169 169 return super(InProcessInteractiveShell, self).enable_pylab(gui, import_all,
170 170 welcome_message)
171 171
172 172 InteractiveShellABC.register(InProcessInteractiveShell)
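
The _redirected_io helper above is a plain save/swap/restore of sys.stdout and sys.stderr; a standalone Python 3 sketch of the same pattern (hardened with try/finally, which the original omits):

    import sys
    from contextlib import contextmanager
    from io import StringIO

    @contextmanager
    def redirected_io(out, err):
        saved = sys.stdout, sys.stderr
        sys.stdout, sys.stderr = out, err
        try:
            yield
        finally:
            sys.stdout, sys.stderr = saved

    buf = StringIO()
    with redirected_io(buf, buf):
        print('captured')
    assert buf.getvalue() == 'captured\n'
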
@@ -1,77 +1,71 b''
1 1 """A kernel manager for in-process kernels."""
2 2
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2013 The IPython Development Team
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
9
10 #-----------------------------------------------------------------------------
11 # Imports
12 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
13 5
14 6 from IPython.utils.traitlets import Instance, DottedObjectName
15 7 from IPython.kernel.managerabc import KernelManagerABC
16 8 from IPython.kernel.manager import KernelManager
9 from IPython.kernel.zmq.session import Session
17 10
18 #-----------------------------------------------------------------------------
19 # Main kernel manager class
20 #-----------------------------------------------------------------------------
21 11
22 12 class InProcessKernelManager(KernelManager):
23 13 """A manager for an in-process kernel.
24 14
25 15 This class implements the interface of
26 16 `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows
27 17 (asynchronous) frontends to be used seamlessly with an in-process kernel.
28 18
29 19 See `IPython.kernel.kernelmanager.KernelManager` for docstrings.
30 20 """
31 21
32 22 # The kernel process with which the KernelManager is communicating.
33 23 kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel')
34 24 # the client class for KM.client() shortcut
35 25 client_class = DottedObjectName('IPython.kernel.inprocess.BlockingInProcessKernelClient')
36
26
27 def _session_default(self):
28 # don't sign in-process messages
29 return Session(key=b'', parent=self)
30
37 31 #--------------------------------------------------------------------------
38 32 # Kernel management methods
39 33 #--------------------------------------------------------------------------
40 34
41 35 def start_kernel(self, **kwds):
42 36 from IPython.kernel.inprocess.ipkernel import InProcessKernel
43 self.kernel = InProcessKernel()
37 self.kernel = InProcessKernel(parent=self, session=self.session)
44 38
45 39 def shutdown_kernel(self):
46 40 self._kill_kernel()
47 41
48 42 def restart_kernel(self, now=False, **kwds):
49 43 self.shutdown_kernel()
50 44 self.start_kernel(**kwds)
51 45
52 46 @property
53 47 def has_kernel(self):
54 48 return self.kernel is not None
55 49
56 50 def _kill_kernel(self):
57 51 self.kernel = None
58 52
59 53 def interrupt_kernel(self):
60 54 raise NotImplementedError("Cannot interrupt in-process kernel.")
61 55
62 56 def signal_kernel(self, signum):
63 57 raise NotImplementedError("Cannot signal in-process kernel.")
64 58
65 59 def is_alive(self):
66 60 return self.kernel is not None
67 61
68 62 def client(self, **kwargs):
69 63 kwargs['kernel'] = self.kernel
70 64 return super(InProcessKernelManager, self).client(**kwargs)
71 65
72 66
73 67 #-----------------------------------------------------------------------------
74 68 # ABC Registration
75 69 #-----------------------------------------------------------------------------
76 70
77 71 KernelManagerABC.register(InProcessKernelManager)
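
As a sketch of the intended workflow (it mirrors the test cases later in this changeset), a frontend drives the in-process kernel through the client() shortcut, which now shares the unsigned session created by _session_default:

    from IPython.kernel.inprocess.manager import InProcessKernelManager

    km = InProcessKernelManager()
    km.start_kernel()        # creates the InProcessKernel with km's session
    kc = km.client()         # a BlockingInProcessKernelClient (client_class)
    kc.start_channels()
    kc.wait_for_ready()
    kc.execute('x = 41 + 1')                 # runs in this very process
    print(km.kernel.shell.user_ns['x'])      # -> 42
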
@@ -1,69 +1,69 b''
1 1 # Copyright (c) IPython Development Team.
2 2 # Distributed under the terms of the Modified BSD License.
3 3
4 4 from __future__ import print_function
5 5
6 6 import sys
7 7 import unittest
8 8
9 9 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
10 10 from IPython.kernel.inprocess.manager import InProcessKernelManager
11 11 from IPython.kernel.inprocess.ipkernel import InProcessKernel
12 12 from IPython.kernel.tests.utils import assemble_output
13 13 from IPython.testing.decorators import skipif_not_matplotlib
14 14 from IPython.utils.io import capture_output
15 15 from IPython.utils import py3compat
16 16
17 17 if py3compat.PY3:
18 18 from io import StringIO
19 19 else:
20 20 from StringIO import StringIO
21 21
22 22
23 23 class InProcessKernelTestCase(unittest.TestCase):
24 24
25 25 def setUp(self):
26 26 self.km = InProcessKernelManager()
27 27 self.km.start_kernel()
28 self.kc = BlockingInProcessKernelClient(kernel=self.km.kernel)
28 self.kc = self.km.client()
29 29 self.kc.start_channels()
30 30 self.kc.wait_for_ready()
31 31
32 32 @skipif_not_matplotlib
33 33 def test_pylab(self):
34 34 """Does %pylab work in the in-process kernel?"""
35 35 kc = self.kc
36 36 kc.execute('%pylab')
37 37 out, err = assemble_output(kc.iopub_channel)
38 38 self.assertIn('matplotlib', out)
39 39
40 40 def test_raw_input(self):
41 41 """ Does the in-process kernel handle raw_input correctly?
42 42 """
43 43 io = StringIO('foobar\n')
44 44 sys_stdin = sys.stdin
45 45 sys.stdin = io
46 46 try:
47 47 if py3compat.PY3:
48 48 self.kc.execute('x = input()')
49 49 else:
50 50 self.kc.execute('x = raw_input()')
51 51 finally:
52 52 sys.stdin = sys_stdin
53 53 self.assertEqual(self.km.kernel.shell.user_ns.get('x'), 'foobar')
54 54
55 55 def test_stdout(self):
56 56 """ Does the in-process kernel correctly capture IO?
57 57 """
58 58 kernel = InProcessKernel()
59 59
60 60 with capture_output() as io:
61 61 kernel.shell.run_cell('print("foo")')
62 62 self.assertEqual(io.stdout, 'foo\n')
63 63
64 kc = BlockingInProcessKernelClient(kernel=kernel)
64 kc = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session)
65 65 kernel.frontends.append(kc)
66 66 kc.execute('print("bar")')
67 67 out, err = assemble_output(kc.iopub_channel)
68 68 self.assertEqual(out, 'bar\n')
69 69
@@ -1,108 +1,108 b''
1 1 # Copyright (c) IPython Development Team.
2 2 # Distributed under the terms of the Modified BSD License.
3 3
4 4 from __future__ import print_function
5 5
6 6 import unittest
7 7
8 8 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
9 9 from IPython.kernel.inprocess.manager import InProcessKernelManager
10 10
11 11 #-----------------------------------------------------------------------------
12 12 # Test case
13 13 #-----------------------------------------------------------------------------
14 14
15 15 class InProcessKernelManagerTestCase(unittest.TestCase):
16 16
17 17 def test_interface(self):
18 18 """ Does the in-process kernel manager implement the basic KM interface?
19 19 """
20 20 km = InProcessKernelManager()
21 21 self.assert_(not km.has_kernel)
22 22
23 23 km.start_kernel()
24 24 self.assert_(km.has_kernel)
25 25 self.assert_(km.kernel is not None)
26 26
27 kc = BlockingInProcessKernelClient(kernel=km.kernel)
27 kc = km.client()
28 28 self.assert_(not kc.channels_running)
29 29
30 30 kc.start_channels()
31 31 self.assert_(kc.channels_running)
32 32
33 33 old_kernel = km.kernel
34 34 km.restart_kernel()
35 35 self.assertIsNotNone(km.kernel)
36 36 self.assertNotEquals(km.kernel, old_kernel)
37 37
38 38 km.shutdown_kernel()
39 39 self.assert_(not km.has_kernel)
40 40
41 41 self.assertRaises(NotImplementedError, km.interrupt_kernel)
42 42 self.assertRaises(NotImplementedError, km.signal_kernel, 9)
43 43
44 44 kc.stop_channels()
45 45 self.assert_(not kc.channels_running)
46 46
47 47 def test_execute(self):
48 48 """ Does executing code in an in-process kernel work?
49 49 """
50 50 km = InProcessKernelManager()
51 51 km.start_kernel()
52 kc = BlockingInProcessKernelClient(kernel=km.kernel)
52 kc = km.client()
53 53 kc.start_channels()
54 54 kc.wait_for_ready()
55 55 kc.execute('foo = 1')
56 56 self.assertEquals(km.kernel.shell.user_ns['foo'], 1)
57 57
58 58 def test_complete(self):
59 59 """ Does requesting completion from an in-process kernel work?
60 60 """
61 61 km = InProcessKernelManager()
62 62 km.start_kernel()
63 kc = BlockingInProcessKernelClient(kernel=km.kernel)
63 kc = km.client()
64 64 kc.start_channels()
65 65 kc.wait_for_ready()
66 66 km.kernel.shell.push({'my_bar': 0, 'my_baz': 1})
67 67 kc.complete('my_ba', 5)
68 68 msg = kc.get_shell_msg()
69 69 self.assertEqual(msg['header']['msg_type'], 'complete_reply')
70 70 self.assertEqual(sorted(msg['content']['matches']),
71 71 ['my_bar', 'my_baz'])
72 72
73 73 def test_inspect(self):
74 74 """ Does requesting object information from an in-process kernel work?
75 75 """
76 76 km = InProcessKernelManager()
77 77 km.start_kernel()
78 kc = BlockingInProcessKernelClient(kernel=km.kernel)
78 kc = km.client()
79 79 kc.start_channels()
80 80 kc.wait_for_ready()
81 81 km.kernel.shell.user_ns['foo'] = 1
82 82 kc.inspect('foo')
83 83 msg = kc.get_shell_msg()
84 84 self.assertEqual(msg['header']['msg_type'], 'inspect_reply')
85 85 content = msg['content']
86 86 assert content['found']
87 87 text = content['data']['text/plain']
88 88 self.assertIn('int', text)
89 89
90 90 def test_history(self):
91 91 """ Does requesting history from an in-process kernel work?
92 92 """
93 93 km = InProcessKernelManager()
94 94 km.start_kernel()
95 kc = BlockingInProcessKernelClient(kernel=km.kernel)
95 kc = km.client()
96 96 kc.start_channels()
97 97 kc.wait_for_ready()
98 98 kc.execute('%who')
99 99 kc.history(hist_access_type='tail', n=1)
100 100 msg = kc.shell_channel.get_msgs()[-1]
101 101 self.assertEquals(msg['header']['msg_type'], 'history_reply')
102 102 history = msg['content']['history']
103 103 self.assertEquals(len(history), 1)
104 104 self.assertEquals(history[0][2], '%who')
105 105
106 106
107 107 if __name__ == '__main__':
108 108 unittest.main()
@@ -1,388 +1,387 b''
1 1 """An Application for launching a kernel"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from __future__ import print_function
7 7
8 8 import atexit
9 9 import os
10 10 import sys
11 11 import signal
12 12
13 13 import zmq
14 14 from zmq.eventloop import ioloop
15 15 from zmq.eventloop.zmqstream import ZMQStream
16 16
17 17 from IPython.core.ultratb import FormattedTB
18 18 from IPython.core.application import (
19 19 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
20 20 )
21 21 from IPython.core.profiledir import ProfileDir
22 22 from IPython.core.shellapp import (
23 23 InteractiveShellApp, shell_flags, shell_aliases
24 24 )
25 25 from IPython.utils import io
26 26 from IPython.utils.path import filefind
27 27 from IPython.utils.traitlets import (
28 28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
29 29 )
30 30 from IPython.utils.importstring import import_item
31 31 from IPython.kernel import write_connection_file
32 32 from IPython.kernel.connect import ConnectionFileMixin
33 33
34 34 # local imports
35 35 from .heartbeat import Heartbeat
36 36 from .ipkernel import IPythonKernel
37 37 from .parentpoller import ParentPollerUnix, ParentPollerWindows
38 38 from .session import (
39 Session, session_flags, session_aliases, default_secure,
39 Session, session_flags, session_aliases,
40 40 )
41 41 from .zmqshell import ZMQInteractiveShell
42 42
43 43 #-----------------------------------------------------------------------------
44 44 # Flags and Aliases
45 45 #-----------------------------------------------------------------------------
46 46
47 47 kernel_aliases = dict(base_aliases)
48 48 kernel_aliases.update({
49 49 'ip' : 'IPKernelApp.ip',
50 50 'hb' : 'IPKernelApp.hb_port',
51 51 'shell' : 'IPKernelApp.shell_port',
52 52 'iopub' : 'IPKernelApp.iopub_port',
53 53 'stdin' : 'IPKernelApp.stdin_port',
54 54 'control' : 'IPKernelApp.control_port',
55 55 'f' : 'IPKernelApp.connection_file',
56 56 'transport': 'IPKernelApp.transport',
57 57 })
58 58
59 59 kernel_flags = dict(base_flags)
60 60 kernel_flags.update({
61 61 'no-stdout' : (
62 62 {'IPKernelApp' : {'no_stdout' : True}},
63 63 "redirect stdout to the null device"),
64 64 'no-stderr' : (
65 65 {'IPKernelApp' : {'no_stderr' : True}},
66 66 "redirect stderr to the null device"),
67 67 'pylab' : (
68 68 {'IPKernelApp' : {'pylab' : 'auto'}},
69 69 """Pre-load matplotlib and numpy for interactive use with
70 70 the default matplotlib backend."""),
71 71 })
72 72
73 73 # inherit flags&aliases for any IPython shell apps
74 74 kernel_aliases.update(shell_aliases)
75 75 kernel_flags.update(shell_flags)
76 76
77 77 # inherit flags&aliases for Sessions
78 78 kernel_aliases.update(session_aliases)
79 79 kernel_flags.update(session_flags)
80 80
81 81 _ctrl_c_message = """\
82 82 NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
83 83
84 84 To exit, you will have to explicitly quit this process, by either sending
85 85 "quit" from a client, or using Ctrl-\\ in UNIX-like environments.
86 86
87 87 To read more about this, see https://github.com/ipython/ipython/issues/2049
88 88
89 89 """
90 90
91 91 #-----------------------------------------------------------------------------
92 92 # Application class for starting an IPython Kernel
93 93 #-----------------------------------------------------------------------------
94 94
95 95 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
96 96 ConnectionFileMixin):
97 97 name='ipython-kernel'
98 98 aliases = Dict(kernel_aliases)
99 99 flags = Dict(kernel_flags)
100 100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
101 101 # the kernel class, as an importstring
102 102 kernel_class = Type('IPython.kernel.zmq.ipkernel.IPythonKernel', config=True,
103 103 klass='IPython.kernel.zmq.kernelbase.Kernel',
104 104 help="""The Kernel subclass to be used.
105 105
106 106 This should allow easy re-use of the IPKernelApp entry point
107 107 to configure and launch kernels other than IPython's own.
108 108 """)
109 109 kernel = Any()
110 110 poller = Any() # don't restrict this even though current pollers are all Threads
111 111 heartbeat = Instance(Heartbeat)
112 112 ports = Dict()
113 113
114 114 # connection info:
115 115
116 116 @property
117 117 def abs_connection_file(self):
118 118 if os.path.basename(self.connection_file) == self.connection_file:
119 119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
120 120 else:
121 121 return self.connection_file
122 122
123 123
124 124 # streams, etc.
125 125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
126 126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
127 127 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
128 128 config=True, help="The importstring for the OutStream factory")
129 129 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
130 130 config=True, help="The importstring for the DisplayHook factory")
131 131
132 132 # polling
133 133 parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0), config=True,
134 134 help="""kill this process if its parent dies. On Windows, the argument
135 135 specifies the HANDLE of the parent process, otherwise it is simply boolean.
136 136 """)
137 137 interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0), config=True,
138 138 help="""ONLY USED ON WINDOWS
139 139 Interrupt this process when the parent is signaled.
140 140 """)
141 141
142 142 def init_crash_handler(self):
143 143 # Install minimal exception handling
144 144 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
145 145 ostream=sys.__stdout__)
146 146
147 147 def init_poller(self):
148 148 if sys.platform == 'win32':
149 149 if self.interrupt or self.parent_handle:
150 150 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
151 151 elif self.parent_handle:
152 152 self.poller = ParentPollerUnix()
153 153
154 154 def _bind_socket(self, s, port):
155 155 iface = '%s://%s' % (self.transport, self.ip)
156 156 if self.transport == 'tcp':
157 157 if port <= 0:
158 158 port = s.bind_to_random_port(iface)
159 159 else:
160 160 s.bind("tcp://%s:%i" % (self.ip, port))
161 161 elif self.transport == 'ipc':
162 162 if port <= 0:
163 163 port = 1
164 164 path = "%s-%i" % (self.ip, port)
165 165 while os.path.exists(path):
166 166 port = port + 1
167 167 path = "%s-%i" % (self.ip, port)
168 168 else:
169 169 path = "%s-%i" % (self.ip, port)
170 170 s.bind("ipc://%s" % path)
171 171 return port
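
The tcp branch relies on pyzmq's bind_to_random_port; a standalone sketch of that call (socket type and interface chosen arbitrarily):

    import zmq

    ctx = zmq.Context.instance()
    s = ctx.socket(zmq.ROUTER)
    port = s.bind_to_random_port('tcp://127.0.0.1')   # picks a free ephemeral port
    print('bound to port', port)
    s.close()
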
172 172
173 173 def write_connection_file(self):
174 174 """write connection info to JSON file"""
175 175 cf = self.abs_connection_file
176 176 self.log.debug("Writing connection file: %s", cf)
177 177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
178 178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
179 179 iopub_port=self.iopub_port, control_port=self.control_port)
180 180
181 181 def cleanup_connection_file(self):
182 182 cf = self.abs_connection_file
183 183 self.log.debug("Cleaning up connection file: %s", cf)
184 184 try:
185 185 os.remove(cf)
186 186 except (IOError, OSError):
187 187 pass
188 188
189 189 self.cleanup_ipc_files()
190 190
191 191 def init_connection_file(self):
192 192 if not self.connection_file:
193 193 self.connection_file = "kernel-%s.json"%os.getpid()
194 194 try:
195 195 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
196 196 except IOError:
197 197 self.log.debug("Connection file not found: %s", self.connection_file)
198 198 # This means I own it, so I will clean it up:
199 199 atexit.register(self.cleanup_connection_file)
200 200 return
201 201 try:
202 202 self.load_connection_file()
203 203 except Exception:
204 204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
205 205 self.exit(1)
206 206
207 207 def init_sockets(self):
208 208 # Create a context, a session, and the kernel sockets.
209 209 self.log.info("Starting the kernel at pid: %i", os.getpid())
210 210 context = zmq.Context.instance()
211 211 # Uncomment this to try closing the context.
212 212 # atexit.register(context.term)
213 213
214 214 self.shell_socket = context.socket(zmq.ROUTER)
215 215 self.shell_socket.linger = 1000
216 216 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
217 217 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
218 218
219 219 self.iopub_socket = context.socket(zmq.PUB)
220 220 self.iopub_socket.linger = 1000
221 221 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
222 222 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
223 223
224 224 self.stdin_socket = context.socket(zmq.ROUTER)
225 225 self.stdin_socket.linger = 1000
226 226 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
227 227 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
228 228
229 229 self.control_socket = context.socket(zmq.ROUTER)
230 230 self.control_socket.linger = 1000
231 231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
232 232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
233 233
234 234 def init_heartbeat(self):
235 235 """start the heart beating"""
236 236 # heartbeat doesn't share context, because it mustn't be blocked
237 237 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
238 238 hb_ctx = zmq.Context()
239 239 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
240 240 self.hb_port = self.heartbeat.port
241 241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
242 242 self.heartbeat.start()
243 243
244 244 def log_connection_info(self):
245 245 """display connection info, and store ports"""
246 246 basename = os.path.basename(self.connection_file)
247 247 if basename == self.connection_file or \
248 248 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
249 249 # use shortname
250 250 tail = basename
251 251 if self.profile != 'default':
252 252 tail += " --profile %s" % self.profile
253 253 else:
254 254 tail = self.connection_file
255 255 lines = [
256 256 "To connect another client to this kernel, use:",
257 257 " --existing %s" % tail,
258 258 ]
259 259 # log connection info
260 260 # info-level, so often not shown.
261 261 # frontends should use the %connect_info magic
262 262 # to see the connection info
263 263 for line in lines:
264 264 self.log.info(line)
265 265 # also raw print to the terminal if no parent_handle (`ipython kernel`)
266 266 if not self.parent_handle:
267 267 io.rprint(_ctrl_c_message)
268 268 for line in lines:
269 269 io.rprint(line)
270 270
271 271 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
272 272 stdin=self.stdin_port, hb=self.hb_port,
273 273 control=self.control_port)
274 274
275 275 def init_blackhole(self):
276 276 """redirects stdout/stderr to devnull if necessary"""
277 277 if self.no_stdout or self.no_stderr:
278 278 blackhole = open(os.devnull, 'w')
279 279 if self.no_stdout:
280 280 sys.stdout = sys.__stdout__ = blackhole
281 281 if self.no_stderr:
282 282 sys.stderr = sys.__stderr__ = blackhole
283 283
284 284 def init_io(self):
285 285 """Redirect input streams and set a display hook."""
286 286 if self.outstream_class:
287 287 outstream_factory = import_item(str(self.outstream_class))
288 288 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
289 289 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
290 290 if self.displayhook_class:
291 291 displayhook_factory = import_item(str(self.displayhook_class))
292 292 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
293 293
294 294 def init_signal(self):
295 295 signal.signal(signal.SIGINT, signal.SIG_IGN)
296 296
297 297 def init_kernel(self):
298 298 """Create the Kernel object itself"""
299 299 shell_stream = ZMQStream(self.shell_socket)
300 300 control_stream = ZMQStream(self.control_socket)
301 301
302 302 kernel_factory = self.kernel_class.instance
303 303
304 304 kernel = kernel_factory(parent=self, session=self.session,
305 305 shell_streams=[shell_stream, control_stream],
306 306 iopub_socket=self.iopub_socket,
307 307 stdin_socket=self.stdin_socket,
308 308 log=self.log,
309 309 profile_dir=self.profile_dir,
310 310 user_ns=self.user_ns,
311 311 )
312 312 kernel.record_ports(self.ports)
313 313 self.kernel = kernel
314 314
315 315 def init_gui_pylab(self):
316 316 """Enable GUI event loop integration, taking pylab into account."""
317 317
318 318 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
319 319 # to ensure that any exception is printed straight to stderr.
320 320 # Normally _showtraceback associates the reply with an execution,
321 321 # which means frontends will never draw it, as this exception
322 322 # is not associated with any execute request.
323 323
324 324 shell = self.shell
325 325 _showtraceback = shell._showtraceback
326 326 try:
327 327 # replace error-sending traceback with stderr
328 328 def print_tb(etype, evalue, stb):
329 329 print ("GUI event loop or pylab initialization failed",
330 330 file=io.stderr)
331 331 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
332 332 shell._showtraceback = print_tb
333 333 InteractiveShellApp.init_gui_pylab(self)
334 334 finally:
335 335 shell._showtraceback = _showtraceback
336 336
337 337 def init_shell(self):
338 338 self.shell = getattr(self.kernel, 'shell', None)
339 339 if self.shell:
340 340 self.shell.configurables.append(self)
341 341
342 342 @catch_config_error
343 343 def initialize(self, argv=None):
344 344 super(IPKernelApp, self).initialize(argv)
345 default_secure(self.config)
346 345 self.init_blackhole()
347 346 self.init_connection_file()
348 347 self.init_poller()
349 348 self.init_sockets()
350 349 self.init_heartbeat()
351 350 # writing/displaying connection info must be *after* init_sockets/heartbeat
352 351 self.log_connection_info()
353 352 self.write_connection_file()
354 353 self.init_io()
355 354 self.init_signal()
356 355 self.init_kernel()
357 356 # shell init steps
358 357 self.init_path()
359 358 self.init_shell()
360 359 if self.shell:
361 360 self.init_gui_pylab()
362 361 self.init_extensions()
363 362 self.init_code()
364 363 # flush stdout/stderr, so that anything written to these streams during
365 364 # initialization does not get associated with the first execution request
366 365 sys.stdout.flush()
367 366 sys.stderr.flush()
368 367
369 368 def start(self):
370 369 if self.poller is not None:
371 370 self.poller.start()
372 371 self.kernel.start()
373 372 try:
374 373 ioloop.IOLoop.instance().start()
375 374 except KeyboardInterrupt:
376 375 pass
377 376
378 377 launch_new_instance = IPKernelApp.launch_instance
379 378
380 379 def main():
381 380 """Run an IPKernel as an application"""
382 381 app = IPKernelApp.instance()
383 382 app.initialize()
384 383 app.start()
385 384
386 385
387 386 if __name__ == '__main__':
388 387 main()
@@ -1,873 +1,877 b''
1 1 """Session object for building, serializing, sending, and receiving messages in
2 2 IPython. The Session object supports serialization, HMAC signatures, and
3 3 metadata on messages.
4 4
5 5 Also defined here are utilities for working with Sessions:
6 6 * A SessionFactory to be used as a base class for configurables that work with
7 7 Sessions.
8 8 * A Message object for convenience that allows attribute-access to the msg dict.
9 9 """
10 10
11 11 # Copyright (c) IPython Development Team.
12 12 # Distributed under the terms of the Modified BSD License.
13 13
14 14 import hashlib
15 15 import hmac
16 16 import logging
17 17 import os
18 18 import pprint
19 19 import random
20 20 import uuid
21 21 import warnings
22 22 from datetime import datetime
23 23
24 24 try:
25 25 import cPickle
26 26 pickle = cPickle
27 27 except:
28 28 cPickle = None
29 29 import pickle
30 30
31 31 try:
32 32 # We are using compare_digest to limit the surface of timing attacks
33 33 from hmac import compare_digest
34 34 except ImportError:
35 35 # Python < 2.7.7: when digests don't match, no feedback is provided,
36 36 # limiting the attack surface
37 37 def compare_digest(a,b): return a == b
38 38
39 39 import zmq
40 40 from zmq.utils import jsonapi
41 41 from zmq.eventloop.ioloop import IOLoop
42 42 from zmq.eventloop.zmqstream import ZMQStream
43 43
44 44 from IPython.core.release import kernel_protocol_version
45 45 from IPython.config.configurable import Configurable, LoggingConfigurable
46 46 from IPython.utils import io
47 47 from IPython.utils.importstring import import_item
48 48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
49 49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
50 50 iteritems)
51 51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 52 DottedObjectName, CUnicode, Dict, Integer,
53 53 TraitError,
54 54 )
55 55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 56 from IPython.kernel.adapter import adapt
57 57 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
58 58
59 59 #-----------------------------------------------------------------------------
60 60 # utility functions
61 61 #-----------------------------------------------------------------------------
62 62
63 63 def squash_unicode(obj):
64 64 """coerce unicode back to bytestrings."""
65 65 if isinstance(obj,dict):
66 66 for key in obj.keys():
67 67 obj[key] = squash_unicode(obj[key])
68 68 if isinstance(key, unicode_type):
69 69 obj[squash_unicode(key)] = obj.pop(key)
70 70 elif isinstance(obj, list):
71 71 for i,v in enumerate(obj):
72 72 obj[i] = squash_unicode(v)
73 73 elif isinstance(obj, unicode_type):
74 74 obj = obj.encode('utf8')
75 75 return obj
76 76
77 77 #-----------------------------------------------------------------------------
78 78 # globals and defaults
79 79 #-----------------------------------------------------------------------------
80 80
81 81 # ISO8601-ify datetime objects
82 82 # allow unicode
83 83 # disallow nan, because it's not actually valid JSON
84 84 json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
85 85 ensure_ascii=False, allow_nan=False,
86 86 )
87 87 json_unpacker = lambda s: jsonapi.loads(s)
88 88
89 89 pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
90 90 pickle_unpacker = pickle.loads
91 91
92 92 default_packer = json_packer
93 93 default_unpacker = json_unpacker
94 94
95 95 DELIM = b"<IDS|MSG>"
96 96 # singleton dummy tracker, which will always report as done
97 97 DONE = zmq.MessageTracker()
98 98
99 99 #-----------------------------------------------------------------------------
100 100 # Mixin tools for apps that use Sessions
101 101 #-----------------------------------------------------------------------------
102 102
103 103 session_aliases = dict(
104 104 ident = 'Session.session',
105 105 user = 'Session.username',
106 106 keyfile = 'Session.keyfile',
107 107 )
108 108
109 109 session_flags = {
110 110 'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
111 111 'keyfile' : '' }},
112 112 """Use HMAC digests for authentication of messages.
113 113 Setting this flag will generate a new UUID to use as the HMAC key.
114 114 """),
115 115 'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
116 116 """Don't authenticate messages."""),
117 117 }
118 118
119 119 def default_secure(cfg):
120 120 """Set the default behavior for a config environment to be secure.
121 121
122 122 If Session.key/keyfile have not been set, set Session.key to
123 123 a new random UUID.
124 124 """
125
125 warnings.warn("default_secure is deprecated", DeprecationWarning)
126 126 if 'Session' in cfg:
127 127 if 'key' in cfg.Session or 'keyfile' in cfg.Session:
128 128 return
129 129 # key/keyfile not specified, generate new UUID:
130 130 cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
131 131
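
In plain config terms, the (now-deprecated) helper above amounts to the following sketch, assuming a fresh Config object and using a random UUID as the HMAC key, just as the 'secure' flag does:

    import uuid
    from IPython.config.loader import Config
    from IPython.utils.py3compat import str_to_bytes

    cfg = Config()
    if 'key' not in cfg.Session and 'keyfile' not in cfg.Session:
        cfg.Session.key = str_to_bytes(str(uuid.uuid4()))   # enable HMAC signing
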
132 132
133 133 #-----------------------------------------------------------------------------
134 134 # Classes
135 135 #-----------------------------------------------------------------------------
136 136
137 137 class SessionFactory(LoggingConfigurable):
138 138 """The Base class for configurables that have a Session, Context, logger,
139 139 and IOLoop.
140 140 """
141 141
142 142 logname = Unicode('')
143 143 def _logname_changed(self, name, old, new):
144 144 self.log = logging.getLogger(new)
145 145
146 146 # not configurable:
147 147 context = Instance('zmq.Context')
148 148 def _context_default(self):
149 149 return zmq.Context.instance()
150 150
151 151 session = Instance('IPython.kernel.zmq.session.Session')
152 152
153 153 loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
154 154 def _loop_default(self):
155 155 return IOLoop.instance()
156 156
157 157 def __init__(self, **kwargs):
158 158 super(SessionFactory, self).__init__(**kwargs)
159 159
160 160 if self.session is None:
161 161 # construct the session
162 162 self.session = Session(**kwargs)
163 163
164 164
165 165 class Message(object):
166 166 """A simple message object that maps dict keys to attributes.
167 167
168 168 A Message can be created from a dict and a dict from a Message instance
169 169 simply by calling dict(msg_obj)."""
170 170
171 171 def __init__(self, msg_dict):
172 172 dct = self.__dict__
173 173 for k, v in iteritems(dict(msg_dict)):
174 174 if isinstance(v, dict):
175 175 v = Message(v)
176 176 dct[k] = v
177 177
178 178 # Having this iterator lets dict(msg_obj) work out of the box.
179 179 def __iter__(self):
180 180 return iter(iteritems(self.__dict__))
181 181
182 182 def __repr__(self):
183 183 return repr(self.__dict__)
184 184
185 185 def __str__(self):
186 186 return pprint.pformat(self.__dict__)
187 187
188 188 def __contains__(self, k):
189 189 return k in self.__dict__
190 190
191 191 def __getitem__(self, k):
192 192 return self.__dict__[k]
193 193
194 194
195 195 def msg_header(msg_id, msg_type, username, session):
196 196 date = datetime.now()
197 197 version = kernel_protocol_version
198 198 return locals()
199 199
200 200 def extract_header(msg_or_header):
201 201 """Given a message or header, return the header."""
202 202 if not msg_or_header:
203 203 return {}
204 204 try:
205 205 # See if msg_or_header is the entire message.
206 206 h = msg_or_header['header']
207 207 except KeyError:
208 208 try:
209 209 # See if msg_or_header is just the header
210 210 h = msg_or_header['msg_id']
211 211 except KeyError:
212 212 raise
213 213 else:
214 214 h = msg_or_header
215 215 if not isinstance(h, dict):
216 216 h = dict(h)
217 217 return h
218 218
219 219 class Session(Configurable):
220 220 """Object for handling serialization and sending of messages.
221 221
222 222 The Session object handles building messages and sending them
223 223 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
224 224 other over the network via Session objects, and only need to work with the
225 225 dict-based IPython message spec. The Session will handle
226 226 serialization/deserialization, security, and metadata.
227 227
228 228 Sessions support configurable serialization via packer/unpacker traits,
229 229 and signing with HMAC digests via the key/keyfile traits.
230 230
231 231 Parameters
232 232 ----------
233 233
234 234 debug : bool
235 235 whether to trigger extra debugging statements
236 236 packer/unpacker : str : 'json', 'pickle' or import_string
237 237 importstrings for methods to serialize message parts. If just
238 238 'json' or 'pickle', predefined JSON and pickle packers will be used.
239 239 Otherwise, the entire importstring must be used.
240 240
241 241 The functions must accept at least valid JSON input, and output *bytes*.
242 242
243 243 For example, to use msgpack:
244 244 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
245 245 pack/unpack : callables
246 246 You can also set the pack/unpack callables for serialization directly.
247 247 session : unicode (must be ascii)
248 248 the ID of this Session object. The default is to generate a new UUID.
249 249 username : unicode
250 250 username added to message headers. The default is to ask the OS.
251 251 key : bytes
252 252 The key used to initialize an HMAC signature. If unset, messages
253 253 will not be signed or checked.
254 254 keyfile : filepath
255 255 The file containing a key. If this is set, `key` will be initialized
256 256 to the contents of the file.
257 257
258 258 """
259 259
260 260 debug=Bool(False, config=True, help="""Debug output in the Session""")
261 261
262 262 packer = DottedObjectName('json',config=True,
263 263 help="""The name of the packer for serializing messages.
264 264 Should be one of 'json', 'pickle', or an import name
265 265 for a custom callable serializer.""")
266 266 def _packer_changed(self, name, old, new):
267 267 if new.lower() == 'json':
268 268 self.pack = json_packer
269 269 self.unpack = json_unpacker
270 270 self.unpacker = new
271 271 elif new.lower() == 'pickle':
272 272 self.pack = pickle_packer
273 273 self.unpack = pickle_unpacker
274 274 self.unpacker = new
275 275 else:
276 276 self.pack = import_item(str(new))
277 277
278 278 unpacker = DottedObjectName('json', config=True,
279 279 help="""The name of the unpacker for deserializing messages.
280 280 Only used with custom functions for `packer`.""")
281 281 def _unpacker_changed(self, name, old, new):
282 282 if new.lower() == 'json':
283 283 self.pack = json_packer
284 284 self.unpack = json_unpacker
285 285 self.packer = new
286 286 elif new.lower() == 'pickle':
287 287 self.pack = pickle_packer
288 288 self.unpack = pickle_unpacker
289 289 self.packer = new
290 290 else:
291 291 self.unpack = import_item(str(new))
292 292
293 293 session = CUnicode(u'', config=True,
294 294 help="""The UUID identifying this session.""")
295 295 def _session_default(self):
296 296 u = unicode_type(uuid.uuid4())
297 297 self.bsession = u.encode('ascii')
298 298 return u
299 299
300 300 def _session_changed(self, name, old, new):
301 301 self.bsession = self.session.encode('ascii')
302 302
303 303 # bsession is the session as bytes
304 304 bsession = CBytes(b'')
305 305
306 306 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
307 307 help="""Username for the Session. Default is your system username.""",
308 308 config=True)
309 309
310 310 metadata = Dict({}, config=True,
311 311 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
312 312
313 313 # if 0, no adapting to do.
314 314 adapt_version = Integer(0)
315 315
316 316 # message signature related traits:
317 317
318 key = CBytes(b'', config=True,
319 help="""execution key, for extra authentication.""")
318 key = CBytes(config=True,
319 help="""execution key, for signing messages.""")
320 def _key_default(self):
321 return str_to_bytes(str(uuid.uuid4()))
322
320 323 def _key_changed(self):
321 324 self._new_auth()
322 325
323 326 signature_scheme = Unicode('hmac-sha256', config=True,
324 327 help="""The digest scheme used to construct the message signatures.
325 328 Must have the form 'hmac-HASH'.""")
326 329 def _signature_scheme_changed(self, name, old, new):
327 330 if not new.startswith('hmac-'):
328 331 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
329 332 hash_name = new.split('-', 1)[1]
330 333 try:
331 334 self.digest_mod = getattr(hashlib, hash_name)
332 335 except AttributeError:
333 336 raise TraitError("hashlib has no such attribute: %s" % hash_name)
334 337 self._new_auth()
335 338
336 339 digest_mod = Any()
337 340 def _digest_mod_default(self):
338 341 return hashlib.sha256
339 342
340 343 auth = Instance(hmac.HMAC)
341 344
342 345 def _new_auth(self):
343 346 if self.key:
344 347 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
345 348 else:
346 349 self.auth = None
347 350
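
A hedged sketch of how this auth object is later consumed (it mirrors Session.sign, which copies the shared HMAC and feeds it the serialized message parts; the key and parts here are illustrative):

    import hashlib
    import hmac

    auth = hmac.HMAC(b'my-secret-key', digestmod=hashlib.sha256)

    def sign(parts):
        h = auth.copy()        # never mutate the shared HMAC object
        for p in parts:
            h.update(p)
        return h.hexdigest().encode('ascii')

    print(sign([b'header', b'parent', b'metadata', b'content']))
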
348 351 digest_history = Set()
349 352 digest_history_size = Integer(2**16, config=True,
350 353 help="""The maximum number of digests to remember.
351 354
352 355 The digest history will be culled when it exceeds this value.
353 356 """
354 357 )
355 358
356 359 keyfile = Unicode('', config=True,
357 360 help="""path to file containing execution key.""")
358 361 def _keyfile_changed(self, name, old, new):
359 362 with open(new, 'rb') as f:
360 363 self.key = f.read().strip()
361 364
362 365 # for protecting against sends from forks
363 366 pid = Integer()
364 367
365 368 # serialization traits:
366 369
367 370 pack = Any(default_packer) # the actual packer function
368 371 def _pack_changed(self, name, old, new):
369 372 if not callable(new):
370 373 raise TypeError("packer must be callable, not %s"%type(new))
371 374
372 375 unpack = Any(default_unpacker) # the actual unpacker function
373 376 def _unpack_changed(self, name, old, new):
374 377 # unpacker is not checked - it is assumed to be the inverse of pack
375 378 if not callable(new):
376 379 raise TypeError("unpacker must be callable, not %s"%type(new))
377 380
378 381 # thresholds:
379 382 copy_threshold = Integer(2**16, config=True,
380 383 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
381 384 buffer_threshold = Integer(MAX_BYTES, config=True,
382 385 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
383 386 item_threshold = Integer(MAX_ITEMS, config=True,
384 387 help="""The maximum number of items for a container to be introspected for custom serialization.
385 388 Containers larger than this are pickled outright.
386 389 """
387 390 )
388 391
389 392
390 393 def __init__(self, **kwargs):
391 394 """create a Session object
392 395
393 396 Parameters
394 397 ----------
395 398
396 399 debug : bool
397 400 whether to trigger extra debugging statements
398 401 packer/unpacker : str : 'json', 'pickle' or import_string
399 402 importstrings for methods to serialize message parts. If just
400 403 'json' or 'pickle', predefined JSON and pickle packers will be used.
401 404 Otherwise, the entire importstring must be used.
402 405
403 406 The functions must accept at least valid JSON input, and output
404 407 *bytes*.
405 408
406 409 For example, to use msgpack:
407 410 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
408 411 pack/unpack : callables
409 412 You can also set the pack/unpack callables for serialization
410 413 directly.
411 414 session : unicode (must be ascii)
412 415 the ID of this Session object. The default is to generate a new
413 416 UUID.
414 417 bsession : bytes
415 418 The session as bytes
416 419 username : unicode
417 420 username added to message headers. The default is to ask the OS.
418 421 key : bytes
419 422 The key used to initialize an HMAC signature. If unset, messages
420 423 will not be signed or checked.
421 424 signature_scheme : str
422 425 The message digest scheme. Currently must be of the form 'hmac-HASH',
423 426 where 'HASH' is a hashing function available in Python's hashlib.
424 427 The default is 'hmac-sha256'.
425 428 This is ignored if 'key' is empty.
426 429 keyfile : filepath
427 430 The file containing a key. If this is set, `key` will be
428 431 initialized to the contents of the file.
429 432 """
430 433 super(Session, self).__init__(**kwargs)
431 434 self._check_packers()
432 435 self.none = self.pack({})
433 436 # touch self.session to run self._session_default() if necessary, so bsession is defined:
434 437 self.session
435 438 self.pid = os.getpid()
439 self._new_auth()
436 440
437 441 @property
438 442 def msg_id(self):
439 443 """always return new uuid"""
440 444 return str(uuid.uuid4())
441 445
442 446 def _check_packers(self):
443 447 """check packers for datetime support."""
444 448 pack = self.pack
445 449 unpack = self.unpack
446 450
447 451 # check simple serialization
448 452 msg = dict(a=[1,'hi'])
449 453 try:
450 454 packed = pack(msg)
451 455 except Exception as e:
452 456 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
453 457 if self.packer == 'json':
454 458 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
455 459 else:
456 460 jsonmsg = ""
457 461 raise ValueError(
458 462 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
459 463 )
460 464
461 465 # ensure packed message is bytes
462 466 if not isinstance(packed, bytes):
463 467 raise ValueError("message packed to %r, but bytes are required"%type(packed))
464 468
465 469 # check that unpack is pack's inverse
466 470 try:
467 471 unpacked = unpack(packed)
468 472 assert unpacked == msg
469 473 except Exception as e:
470 474 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
471 475 if self.packer == 'json':
472 476 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
473 477 else:
474 478 jsonmsg = ""
475 479 raise ValueError(
476 480 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
477 481 )
478 482
479 483 # check datetime support
480 484 msg = dict(t=datetime.now())
481 485 try:
482 486 unpacked = unpack(pack(msg))
483 487 if isinstance(unpacked['t'], datetime):
484 488 raise ValueError("Shouldn't deserialize to datetime")
485 489 except Exception:
486 490 self.pack = lambda o: pack(squash_dates(o))
487 491 self.unpack = lambda s: unpack(s)
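As a sketch of swapping serializers, mirroring the msgpack test further down (this assumes msgpack is installed; `_check_packers` will wrap the pair with `squash_dates` since msgpack cannot pack datetimes natively):

    import msgpack
    from IPython.kernel.zmq.session import Session

    s = Session(pack=msgpack.packb,
                unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'))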
488 492
489 493 def msg_header(self, msg_type):
490 494 return msg_header(self.msg_id, msg_type, self.username, self.session)
491 495
492 496 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
493 497 """Return the nested message dict.
494 498
495 499 This format is different from what is sent over the wire. The
496 500 serialize/deserialize methods convert this nested message dict to the wire
497 501 format, which is a list of message parts.
498 502 """
499 503 msg = {}
500 504 header = self.msg_header(msg_type) if header is None else header
501 505 msg['header'] = header
502 506 msg['msg_id'] = header['msg_id']
503 507 msg['msg_type'] = header['msg_type']
504 508 msg['parent_header'] = {} if parent is None else extract_header(parent)
505 509 msg['content'] = {} if content is None else content
506 510 msg['metadata'] = self.metadata.copy()
507 511 if metadata is not None:
508 512 msg['metadata'].update(metadata)
509 513 return msg
510 514
511 515 def sign(self, msg_list):
512 516 """Sign a message with HMAC digest. If no auth, return b''.
513 517
514 518 Parameters
515 519 ----------
516 520 msg_list : list
517 521 The [p_header,p_parent,p_content] part of the message list.
518 522 """
519 523 if self.auth is None:
520 524 return b''
521 525 h = self.auth.copy()
522 526 for m in msg_list:
523 527 h.update(m)
524 528 return str_to_bytes(h.hexdigest())
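Illustrative only: the signature produced by `sign()` is a plain HMAC hex digest over the packed parts; the key and parts below are hypothetical.

    import hashlib
    import hmac

    key = b'my-secret-key'
    parts = [b'{"msg_id": "abc"}', b'{}', b'{}', b'{}']  # packed header/parent/metadata/content
    h = hmac.HMAC(key, digestmod=hashlib.sha256)         # the default 'hmac-sha256' scheme
    for p in parts:
        h.update(p)
    signature = h.hexdigest().encode('ascii')            # what sign() returns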
525 529
526 530 def serialize(self, msg, ident=None):
527 531 """Serialize the message components to bytes.
528 532
529 533 This is roughly the inverse of deserialize. The serialize/deserialize
530 534 methods work with full message lists, whereas pack/unpack work with
531 535 the individual message parts in the message list.
532 536
533 537 Parameters
534 538 ----------
535 539 msg : dict or Message
536 540 The nested message dict as returned by the self.msg method.
537 541
538 542 Returns
539 543 -------
540 544 msg_list : list
541 545 The list of bytes objects to be sent with the format::
542 546
543 547 [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
544 548 p_metadata, p_content, buffer1, buffer2, ...]
545 549
546 550 In this list, the ``p_*`` entities are the packed or serialized
547 551 versions, so if JSON is used, these are utf8 encoded JSON strings.
548 552 """
549 553 content = msg.get('content', {})
550 554 if content is None:
551 555 content = self.none
552 556 elif isinstance(content, dict):
553 557 content = self.pack(content)
554 558 elif isinstance(content, bytes):
555 559 # content is already packed, as in a relayed message
556 560 pass
557 561 elif isinstance(content, unicode_type):
558 562 # should be bytes, but JSON often spits out unicode
559 563 content = content.encode('utf8')
560 564 else:
561 565 raise TypeError("Content incorrect type: %s"%type(content))
562 566
563 567 real_message = [self.pack(msg['header']),
564 568 self.pack(msg['parent_header']),
565 569 self.pack(msg['metadata']),
566 570 content,
567 571 ]
568 572
569 573 to_send = []
570 574
571 575 if isinstance(ident, list):
572 576 # accept list of idents
573 577 to_send.extend(ident)
574 578 elif ident is not None:
575 579 to_send.append(ident)
576 580 to_send.append(DELIM)
577 581
578 582 signature = self.sign(real_message)
579 583 to_send.append(signature)
580 584
581 585 to_send.extend(real_message)
582 586
583 587 return to_send
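A short sketch of the resulting wire format (identity and content are hypothetical):

    from IPython.kernel.zmq.session import Session

    s = Session(key=b'my-secret-key')
    wire = s.serialize(s.msg('execute', content={'code': 'pass'}), ident=b'engine-0')
    # wire == [b'engine-0', b'<IDS|MSG>', signature,
    #          p_header, p_parent, p_metadata, p_content]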
584 588
585 589 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
586 590 buffers=None, track=False, header=None, metadata=None):
587 591 """Build and send a message via stream or socket.
588 592
589 593 The message format used by this function internally is as follows:
590 594
591 595 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_metadata,p_content,
592 596 buffer1,buffer2,...]
593 597
594 598 The serialize/deserialize methods convert the nested message dict into this
595 599 format.
596 600
597 601 Parameters
598 602 ----------
599 603
600 604 stream : zmq.Socket or ZMQStream
601 605 The socket-like object used to send the data.
602 606 msg_or_type : str or Message/dict
603 607 Normally, msg_or_type will be a msg_type unless a message is being
604 608 sent more than once. If a header is supplied, this can be set to
605 609 None and the msg_type will be pulled from the header.
606 610
607 611 content : dict or None
608 612 The content of the message (ignored if msg_or_type is a message).
609 613 header : dict or None
610 614 The header dict for the message (ignored if msg_or_type is a message).
611 615 parent : Message or dict or None
612 616 The parent or parent header describing the parent of this message
613 617 (ignored if msg_or_type is a message).
614 618 ident : bytes or list of bytes
615 619 The zmq.IDENTITY routing path.
616 620 metadata : dict or None
617 621 The metadata describing the message
618 622 buffers : list or None
619 623 The already-serialized buffers to be appended to the message.
620 624 track : bool
621 625 Whether to track. Only for use with Sockets, because ZMQStream
622 626 objects cannot track messages.
623 627
624 628
625 629 Returns
626 630 -------
627 631 msg : dict
628 632 The constructed message.
629 633 """
630 634 if not isinstance(stream, zmq.Socket):
631 635 # ZMQStreams and dummy sockets do not support tracking.
632 636 track = False
633 637
634 638 if isinstance(msg_or_type, (Message, dict)):
635 639 # We got a Message or message dict, not a msg_type so don't
636 640 # build a new Message.
637 641 msg = msg_or_type
638 642 buffers = buffers or msg.get('buffers', [])
639 643 else:
640 644 msg = self.msg(msg_or_type, content=content, parent=parent,
641 645 header=header, metadata=metadata)
642 646 if os.getpid() != self.pid:
643 647 io.rprint("WARNING: attempted to send message from fork")
644 648 io.rprint(msg)
645 649 return
646 650 buffers = [] if buffers is None else buffers
647 651 if self.adapt_version:
648 652 msg = adapt(msg, self.adapt_version)
649 653 to_send = self.serialize(msg, ident)
650 654 to_send.extend(buffers)
651 655 longest = max(len(s) for s in to_send)
652 656 copy = (longest < self.copy_threshold)
653 657
654 658 if buffers and track and not copy:
655 659 # only really track when we are doing zero-copy buffers
656 660 tracker = stream.send_multipart(to_send, copy=False, track=True)
657 661 else:
658 662 # use dummy tracker, which will be done immediately
659 663 tracker = DONE
660 664 stream.send_multipart(to_send, copy=copy)
661 665
662 666 if self.debug:
663 667 pprint.pprint(msg)
664 668 pprint.pprint(to_send)
665 669 pprint.pprint(buffers)
666 670
667 671 msg['tracker'] = tracker
668 672
669 673 return msg
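A usage sketch over an inproc PAIR, mirroring test_send in the test file below (socket names and address are illustrative):

    import zmq
    from IPython.kernel.zmq.session import Session

    ctx = zmq.Context.instance()
    a = ctx.socket(zmq.PAIR); a.bind("inproc://doc-example")
    b = ctx.socket(zmq.PAIR); b.connect("inproc://doc-example")
    s = Session()
    s.send(a, 'execute', content={'a': 10}, ident=b'foo')
    idents, msg = s.recv(b, mode=0)   # mode=0 blocks instead of zmq.NOBLOCK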
670 674
671 675 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
672 676 """Send a raw message via ident path.
673 677
674 678 This method is used to send an already-serialized message.
675 679
676 680 Parameters
677 681 ----------
678 682 stream : ZMQStream or Socket
679 683 The ZMQ stream or socket to use for sending the message.
680 684 msg_list : list
681 685 The serialized list of messages to send. This only includes the
682 686 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
683 687 the message.
684 688 ident : ident or list
685 689 A single ident or a list of idents to use in sending.
686 690 """
687 691 to_send = []
688 692 if isinstance(ident, bytes):
689 693 ident = [ident]
690 694 if ident is not None:
691 695 to_send.extend(ident)
692 696
693 697 to_send.append(DELIM)
694 698 to_send.append(self.sign(msg_list))
695 699 to_send.extend(msg_list)
696 700 stream.send_multipart(to_send, flags, copy=copy)
697 701
698 702 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
699 703 """Receive and unpack a message.
700 704
701 705 Parameters
702 706 ----------
703 707 socket : ZMQStream or Socket
704 708 The socket or stream to use in receiving.
705 709
706 710 Returns
707 711 -------
708 712 [idents], msg
709 713 [idents] is a list of idents and msg is a nested message dict of
710 714 same format as self.msg returns.
711 715 """
712 716 if isinstance(socket, ZMQStream):
713 717 socket = socket.socket
714 718 try:
715 719 msg_list = socket.recv_multipart(mode, copy=copy)
716 720 except zmq.ZMQError as e:
717 721 if e.errno == zmq.EAGAIN:
718 722 # We can convert EAGAIN to None as we know in this case
719 723 # recv_multipart won't return None.
720 724 return None,None
721 725 else:
722 726 raise
723 727 # split multipart message into identity list and message dict
724 728 # invalid large messages can cause very expensive string comparisons
725 729 idents, msg_list = self.feed_identities(msg_list, copy)
726 730 try:
727 731 return idents, self.deserialize(msg_list, content=content, copy=copy)
728 732 except Exception:
729 733 # TODO: handle it
730 734 raise
731 735
732 736 def feed_identities(self, msg_list, copy=True):
733 737 """Split the identities from the rest of the message.
734 738
735 739 Feed until DELIM is reached, then return the prefix as idents and
736 740 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
737 741 but that would be silly.
738 742
739 743 Parameters
740 744 ----------
741 745 msg_list : a list of Message or bytes objects
742 746 The message to be split.
743 747 copy : bool
744 748 Flag determining whether msg_list contains bytes (True) or zmq.Message objects (False).
745 749
746 750 Returns
747 751 -------
748 752 (idents, msg_list) : two lists
749 753 idents will always be a list of bytes, each of which is a ZMQ
750 754 identity. msg_list will be a list of bytes or zmq.Messages of the
751 755 form [HMAC,p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] and
752 756 should be unpackable/deserializable via self.deserialize at this
753 757 point.
754 758 """
755 759 if copy:
756 760 idx = msg_list.index(DELIM)
757 761 return msg_list[:idx], msg_list[idx+1:]
758 762 else:
759 763 failed = True
760 764 for idx,m in enumerate(msg_list):
761 765 if m.bytes == DELIM:
762 766 failed = False
763 767 break
764 768 if failed:
765 769 raise ValueError("DELIM not in msg_list")
766 770 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
767 771 return [m.bytes for m in idents], msg_list
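A minimal sketch of the ident-splitting contract (idents are hypothetical):

    from IPython.kernel.zmq.session import Session

    s = Session()
    wire = s.serialize(s.msg('execute'), ident=[b'id1', b'id2'])
    idents, msg_list = s.feed_identities(wire)
    # idents == [b'id1', b'id2']; msg_list[0] is the HMAC signature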
768 772
769 773 def _add_digest(self, signature):
770 774 """add a digest to history to protect against replay attacks"""
771 775 if self.digest_history_size == 0:
772 776 # no history, never add digests
773 777 return
774 778
775 779 self.digest_history.add(signature)
776 780 if len(self.digest_history) > self.digest_history_size:
777 781 # threshold reached, cull 10%
778 782 self._cull_digest_history()
779 783
780 784 def _cull_digest_history(self):
781 785 """cull the digest history
782 786
783 787 Removes a randomly selected 10% of the digest history
784 788 """
785 789 current = len(self.digest_history)
786 790 n_to_cull = max(int(current // 10), current - self.digest_history_size)
787 791 if n_to_cull >= current:
788 792 self.digest_history = set()
789 793 return
790 794 to_cull = random.sample(self.digest_history, n_to_cull)
791 795 self.digest_history.difference_update(to_cull)
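A worked example of the culling policy (this matches test_cull_digest_history in the test file below): with a history size of 100, the 101st digest crosses the threshold and culls 10%, leaving 91 entries.

    import uuid
    from IPython.kernel.zmq.session import Session

    s = Session(digest_history_size=100)
    for _ in range(101):
        s._add_digest(uuid.uuid4().bytes)
    # n_to_cull = max(101 // 10, 101 - 100) == 10, so 101 - 10 == 91 remain
    assert len(s.digest_history) == 91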
792 796
793 797 def deserialize(self, msg_list, content=True, copy=True):
794 798 """Unserialize a msg_list to a nested message dict.
795 799
796 800 This is roughly the inverse of serialize. The serialize/deserialize
797 801 methods work with full message lists, whereas pack/unpack work with
798 802 the individual message parts in the message list.
799 803
800 804 Parameters
801 805 ----------
802 806 msg_list : list of bytes or Message objects
803 807 The list of message parts of the form [HMAC,p_header,p_parent,
804 808 p_metadata,p_content,buffer1,buffer2,...].
805 809 content : bool (True)
806 810 Whether to unpack the content dict (True), or leave it packed
807 811 (False).
808 812 copy : bool (True)
809 813 Whether to return the bytes (True), or the non-copying Message
810 814 object in each place (False).
811 815
812 816 Returns
813 817 -------
814 818 msg : dict
815 819 The nested message dict with top-level keys [header, parent_header,
816 820 content, buffers].
817 821 """
818 822 minlen = 5
819 823 message = {}
820 824 if not copy:
821 825 for i in range(minlen):
822 826 msg_list[i] = msg_list[i].bytes
823 827 if self.auth is not None:
824 828 signature = msg_list[0]
825 829 if not signature:
826 830 raise ValueError("Unsigned Message")
827 831 if signature in self.digest_history:
828 832 raise ValueError("Duplicate Signature: %r" % signature)
829 833 self._add_digest(signature)
830 834 check = self.sign(msg_list[1:5])
831 835 if not compare_digest(signature, check):
832 836 raise ValueError("Invalid Signature: %r" % signature)
833 837 if len(msg_list) < minlen:
834 838 raise TypeError("malformed message, must have at least %i elements"%minlen)
835 839 header = self.unpack(msg_list[1])
836 840 message['header'] = extract_dates(header)
837 841 message['msg_id'] = header['msg_id']
838 842 message['msg_type'] = header['msg_type']
839 843 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
840 844 message['metadata'] = self.unpack(msg_list[3])
841 845 if content:
842 846 message['content'] = self.unpack(msg_list[4])
843 847 else:
844 848 message['content'] = msg_list[4]
845 849
846 850 message['buffers'] = msg_list[5:]
847 851 # adapt to the current version
848 852 return adapt(message)
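A sketch of the replay protection above: feeding the same signed message through deserialize twice raises on the duplicate signature (the key is hypothetical).

    from IPython.kernel.zmq.session import Session

    s = Session(key=b'my-secret-key')
    idents, parts = s.feed_identities(s.serialize(s.msg('execute')))
    s.deserialize(parts)   # ok: signature recorded in digest_history
    s.deserialize(parts)   # raises ValueError: Duplicate Signature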
849 853
850 854 def unserialize(self, *args, **kwargs):
851 855 warnings.warn(
852 856 "Session.unserialize is deprecated. Use Session.deserialize.",
853 857 DeprecationWarning,
854 858 )
855 859 return self.deserialize(*args, **kwargs)
856 860
857 861
858 862 def test_msg2obj():
859 863 am = dict(x=1)
860 864 ao = Message(am)
861 865 assert ao.x == am['x']
862 866
863 867 am['y'] = dict(z=1)
864 868 ao = Message(am)
865 869 assert ao.y.z == am['y']['z']
866 870
867 871 k1, k2 = 'y', 'z'
868 872 assert ao[k1][k2] == am[k1][k2]
869 873
870 874 am2 = dict(ao)
871 875 assert am['x'] == am2['x']
872 876 assert am['y']['z'] == am2['y']['z']
873 877
@@ -1,318 +1,318 b''
1 """test building messages with streamsession"""
1 """test building messages with Session"""
2 2
3 #-------------------------------------------------------------------------------
4 # Copyright (C) 2011 The IPython Development Team
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-------------------------------------------------------------------------------
9
10 #-------------------------------------------------------------------------------
11 # Imports
12 #-------------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
13 5
6 import hmac
14 7 import os
15 8 import uuid
16 9 from datetime import datetime
17 10
18 11 import zmq
19 12
20 13 from zmq.tests import BaseZMQTestCase
21 14 from zmq.eventloop.zmqstream import ZMQStream
22 15
23 16 from IPython.kernel.zmq import session as ss
24 17
25 18 from IPython.testing.decorators import skipif, module_not_available
26 19 from IPython.utils.py3compat import string_types
27 20 from IPython.utils import jsonutil
28 21
29 22 def _bad_packer(obj):
30 23 raise TypeError("I don't work")
31 24
32 25 def _bad_unpacker(bytes):
33 26 raise TypeError("I don't work either")
34 27
35 28 class SessionTestCase(BaseZMQTestCase):
36 29
37 30 def setUp(self):
38 31 BaseZMQTestCase.setUp(self)
39 32 self.session = ss.Session()
40 33
41 34
42 35 class TestSession(SessionTestCase):
43 36
44 37 def test_msg(self):
45 38 """message format"""
46 39 msg = self.session.msg('execute')
47 40 thekeys = set('header parent_header metadata content msg_type msg_id'.split())
48 41 s = set(msg.keys())
49 42 self.assertEqual(s, thekeys)
50 43 self.assertTrue(isinstance(msg['content'],dict))
51 44 self.assertTrue(isinstance(msg['metadata'],dict))
52 45 self.assertTrue(isinstance(msg['header'],dict))
53 46 self.assertTrue(isinstance(msg['parent_header'],dict))
54 47 self.assertTrue(isinstance(msg['msg_id'],str))
55 48 self.assertTrue(isinstance(msg['msg_type'],str))
56 49 self.assertEqual(msg['header']['msg_type'], 'execute')
57 50 self.assertEqual(msg['msg_type'], 'execute')
58 51
59 52 def test_serialize(self):
60 53 msg = self.session.msg('execute', content=dict(a=10, b=1.1))
61 54 msg_list = self.session.serialize(msg, ident=b'foo')
62 55 ident, msg_list = self.session.feed_identities(msg_list)
63 56 new_msg = self.session.deserialize(msg_list)
64 57 self.assertEqual(ident[0], b'foo')
65 58 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
66 59 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
67 60 self.assertEqual(new_msg['header'],msg['header'])
68 61 self.assertEqual(new_msg['content'],msg['content'])
69 62 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
70 63 self.assertEqual(new_msg['metadata'],msg['metadata'])
71 64 # ensure floats don't come out as Decimal:
72 65 self.assertEqual(type(msg['content']['b']), type(new_msg['content']['b']))
73 66
67 def test_default_secure(self):
68 self.assertIsInstance(self.session.key, bytes)
69 self.assertIsInstance(self.session.auth, hmac.HMAC)
70
74 71 def test_send(self):
75 72 ctx = zmq.Context.instance()
76 73 A = ctx.socket(zmq.PAIR)
77 74 B = ctx.socket(zmq.PAIR)
78 75 A.bind("inproc://test")
79 76 B.connect("inproc://test")
80 77
81 78 msg = self.session.msg('execute', content=dict(a=10))
82 79 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
83 80
84 81 ident, msg_list = self.session.feed_identities(B.recv_multipart())
85 82 new_msg = self.session.deserialize(msg_list)
86 83 self.assertEqual(ident[0], b'foo')
87 84 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
88 85 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
89 86 self.assertEqual(new_msg['header'],msg['header'])
90 87 self.assertEqual(new_msg['content'],msg['content'])
91 88 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
92 89 self.assertEqual(new_msg['metadata'],msg['metadata'])
93 90 self.assertEqual(new_msg['buffers'],[b'bar'])
94
91
95 92 content = msg['content']
96 93 header = msg['header']
94 header['date'] = datetime.now()
97 95 parent = msg['parent_header']
98 96 metadata = msg['metadata']
99 97 msg_type = header['msg_type']
100 98 self.session.send(A, None, content=content, parent=parent,
101 99 header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
102 100 ident, msg_list = self.session.feed_identities(B.recv_multipart())
103 101 new_msg = self.session.deserialize(msg_list)
104 102 self.assertEqual(ident[0], b'foo')
105 103 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
106 104 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
107 105 self.assertEqual(new_msg['header'],msg['header'])
108 106 self.assertEqual(new_msg['content'],msg['content'])
109 107 self.assertEqual(new_msg['metadata'],msg['metadata'])
110 108 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
111 109 self.assertEqual(new_msg['buffers'],[b'bar'])
112
110
111 header['date'] = datetime.now()
112
113 113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
114 114 ident, new_msg = self.session.recv(B)
115 115 self.assertEqual(ident[0], b'foo')
116 116 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
117 117 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
118 118 self.assertEqual(new_msg['header'],msg['header'])
119 119 self.assertEqual(new_msg['content'],msg['content'])
120 120 self.assertEqual(new_msg['metadata'],msg['metadata'])
121 121 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
122 122 self.assertEqual(new_msg['buffers'],[b'bar'])
123 123
124 124 A.close()
125 125 B.close()
126 126 ctx.term()
127 127
128 128 def test_args(self):
129 129 """initialization arguments for Session"""
130 130 s = self.session
131 131 self.assertTrue(s.pack is ss.default_packer)
132 132 self.assertTrue(s.unpack is ss.default_unpacker)
133 133 self.assertEqual(s.username, os.environ.get('USER', u'username'))
134 134
135 135 s = ss.Session()
136 136 self.assertEqual(s.username, os.environ.get('USER', u'username'))
137 137
138 138 self.assertRaises(TypeError, ss.Session, pack='hi')
139 139 self.assertRaises(TypeError, ss.Session, unpack='hi')
140 140 u = str(uuid.uuid4())
141 141 s = ss.Session(username=u'carrot', session=u)
142 142 self.assertEqual(s.session, u)
143 143 self.assertEqual(s.username, u'carrot')
144 144
145 145 def test_tracking(self):
146 146 """test tracking messages"""
147 147 a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
148 148 s = self.session
149 149 s.copy_threshold = 1
150 150 stream = ZMQStream(a)
151 151 msg = s.send(a, 'hello', track=False)
152 152 self.assertTrue(msg['tracker'] is ss.DONE)
153 153 msg = s.send(a, 'hello', track=True)
154 154 self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
155 155 M = zmq.Message(b'hi there', track=True)
156 156 msg = s.send(a, 'hello', buffers=[M], track=True)
157 157 t = msg['tracker']
158 158 self.assertTrue(isinstance(t, zmq.MessageTracker))
159 159 self.assertRaises(zmq.NotDone, t.wait, .1)
160 160 del M
161 161 t.wait(1) # this will raise
162 162
163 163
164 164 def test_unique_msg_ids(self):
165 165 """test that messages receive unique ids"""
166 166 ids = set()
167 167 for i in range(2**12):
168 168 h = self.session.msg_header('test')
169 169 msg_id = h['msg_id']
170 170 self.assertTrue(msg_id not in ids)
171 171 ids.add(msg_id)
172 172
173 173 def test_feed_identities(self):
174 174 """scrub the front for zmq IDENTITIES"""
175 175 theids = "engine client other".split()
176 176 content = dict(code='whoda',stuff=object())
177 177 themsg = self.session.msg('execute',content=content)
178 178 pmsg = theids
179 179
180 180 def test_session_id(self):
181 181 session = ss.Session()
182 182 # get bs before us
183 183 bs = session.bsession
184 184 us = session.session
185 185 self.assertEqual(us.encode('ascii'), bs)
186 186 session = ss.Session()
187 187 # get us before bs
188 188 us = session.session
189 189 bs = session.bsession
190 190 self.assertEqual(us.encode('ascii'), bs)
191 191 # change propagates:
192 192 session.session = 'something else'
193 193 bs = session.bsession
194 194 us = session.session
195 195 self.assertEqual(us.encode('ascii'), bs)
196 196 session = ss.Session(session='stuff')
197 197 # get us before bs
198 198 self.assertEqual(session.bsession, session.session.encode('ascii'))
199 199 self.assertEqual(b'stuff', session.bsession)
200 200
201 201 def test_zero_digest_history(self):
202 202 session = ss.Session(digest_history_size=0)
203 203 for i in range(11):
204 204 session._add_digest(uuid.uuid4().bytes)
205 205 self.assertEqual(len(session.digest_history), 0)
206 206
207 207 def test_cull_digest_history(self):
208 208 session = ss.Session(digest_history_size=100)
209 209 for i in range(100):
210 210 session._add_digest(uuid.uuid4().bytes)
211 211 self.assertTrue(len(session.digest_history) == 100)
212 212 session._add_digest(uuid.uuid4().bytes)
213 213 self.assertTrue(len(session.digest_history) == 91)
214 214 for i in range(9):
215 215 session._add_digest(uuid.uuid4().bytes)
216 216 self.assertTrue(len(session.digest_history) == 100)
217 217 session._add_digest(uuid.uuid4().bytes)
218 218 self.assertTrue(len(session.digest_history) == 91)
219 219
220 220 def test_bad_pack(self):
221 221 try:
222 222 session = ss.Session(pack=_bad_packer)
223 223 except ValueError as e:
224 224 self.assertIn("could not serialize", str(e))
225 225 self.assertIn("don't work", str(e))
226 226 else:
227 227 self.fail("Should have raised ValueError")
228 228
229 229 def test_bad_unpack(self):
230 230 try:
231 231 session = ss.Session(unpack=_bad_unpacker)
232 232 except ValueError as e:
233 233 self.assertIn("could not handle output", str(e))
234 234 self.assertIn("don't work either", str(e))
235 235 else:
236 236 self.fail("Should have raised ValueError")
237 237
238 238 def test_bad_packer(self):
239 239 try:
240 240 session = ss.Session(packer=__name__ + '._bad_packer')
241 241 except ValueError as e:
242 242 self.assertIn("could not serialize", str(e))
243 243 self.assertIn("don't work", str(e))
244 244 else:
245 245 self.fail("Should have raised ValueError")
246 246
247 247 def test_bad_unpacker(self):
248 248 try:
249 249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
250 250 except ValueError as e:
251 251 self.assertIn("could not handle output", str(e))
252 252 self.assertIn("don't work either", str(e))
253 253 else:
254 254 self.fail("Should have raised ValueError")
255 255
256 256 def test_bad_roundtrip(self):
257 257 with self.assertRaises(ValueError):
258 258 session = ss.Session(unpack=lambda b: 5)
259 259
260 260 def _datetime_test(self, session):
261 261 content = dict(t=datetime.now())
262 262 metadata = dict(t=datetime.now())
263 263 p = session.msg('msg')
264 264 msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
265 265 smsg = session.serialize(msg)
266 266 msg2 = session.deserialize(session.feed_identities(smsg)[1])
267 267 assert isinstance(msg2['header']['date'], datetime)
268 268 self.assertEqual(msg['header'], msg2['header'])
269 269 self.assertEqual(msg['parent_header'], msg2['parent_header'])
271 271 assert isinstance(msg['content']['t'], datetime)
272 272 assert isinstance(msg['metadata']['t'], datetime)
273 273 assert isinstance(msg2['content']['t'], string_types)
274 274 assert isinstance(msg2['metadata']['t'], string_types)
275 275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
277 277
278 278 def test_datetimes(self):
279 279 self._datetime_test(self.session)
280 280
281 281 def test_datetimes_pickle(self):
282 282 session = ss.Session(packer='pickle')
283 283 self._datetime_test(session)
284 284
285 285 @skipif(module_not_available('msgpack'))
286 286 def test_datetimes_msgpack(self):
287 287 import msgpack
288 288
289 289 session = ss.Session(
290 290 pack=msgpack.packb,
291 291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
292 292 )
293 293 self._datetime_test(session)
294 294
295 295 def test_send_raw(self):
296 296 ctx = zmq.Context.instance()
297 297 A = ctx.socket(zmq.PAIR)
298 298 B = ctx.socket(zmq.PAIR)
299 299 A.bind("inproc://test")
300 300 B.connect("inproc://test")
301 301
302 302 msg = self.session.msg('execute', content=dict(a=10))
303 303 msg_list = [self.session.pack(msg[part]) for part in
304 304 ['header', 'parent_header', 'metadata', 'content']]
305 305 self.session.send_raw(A, msg_list, ident=b'foo')
306 306
307 307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
308 308 new_msg = self.session.deserialize(new_msg_list)
309 309 self.assertEqual(ident[0], b'foo')
310 310 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
311 311 self.assertEqual(new_msg['header'],msg['header'])
312 312 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
313 313 self.assertEqual(new_msg['content'],msg['content'])
314 314 self.assertEqual(new_msg['metadata'],msg['metadata'])
315 315
316 316 A.close()
317 317 B.close()
318 318 ctx.term()
@@ -1,547 +1,545 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 """
4 4 The IPython controller application.
5 5
6 6 Authors:
7 7
8 8 * Brian Granger
9 9 * MinRK
10 10
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Copyright (C) 2008 The IPython Development Team
15 15 #
16 16 # Distributed under the terms of the BSD License. The full license is in
17 17 # the file COPYING, distributed as part of this software.
18 18 #-----------------------------------------------------------------------------
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Imports
22 22 #-----------------------------------------------------------------------------
23 23
24 24 from __future__ import with_statement
25 25
26 26 import json
27 27 import os
28 28 import stat
29 29 import sys
30 30
31 31 from multiprocessing import Process
32 32 from signal import signal, SIGINT, SIGABRT, SIGTERM
33 33
34 34 import zmq
35 35 from zmq.devices import ProcessMonitoredQueue
36 36 from zmq.log.handlers import PUBHandler
37 37
38 38 from IPython.core.profiledir import ProfileDir
39 39
40 40 from IPython.parallel.apps.baseapp import (
41 41 BaseParallelApplication,
42 42 base_aliases,
43 43 base_flags,
44 44 catch_config_error,
45 45 )
46 46 from IPython.utils.importstring import import_item
47 47 from IPython.utils.localinterfaces import localhost, public_ips
48 48 from IPython.utils.traitlets import Instance, Unicode, Bool, List, Dict, TraitError
49 49
50 50 from IPython.kernel.zmq.session import (
51 Session, session_aliases, session_flags, default_secure
51 Session, session_aliases, session_flags,
52 52 )
53 53
54 54 from IPython.parallel.controller.heartmonitor import HeartMonitor
55 55 from IPython.parallel.controller.hub import HubFactory
56 56 from IPython.parallel.controller.scheduler import TaskScheduler,launch_scheduler
57 57 from IPython.parallel.controller.dictdb import DictDB
58 58
59 59 from IPython.parallel.util import split_url, disambiguate_url, set_hwm
60 60
61 61 # conditional import of SQLiteDB / MongoDB backend class
62 62 real_dbs = []
63 63
64 64 try:
65 65 from IPython.parallel.controller.sqlitedb import SQLiteDB
66 66 except ImportError:
67 67 pass
68 68 else:
69 69 real_dbs.append(SQLiteDB)
70 70
71 71 try:
72 72 from IPython.parallel.controller.mongodb import MongoDB
73 73 except ImportError:
74 74 pass
75 75 else:
76 76 real_dbs.append(MongoDB)
77 77
78 78
79 79
80 80 #-----------------------------------------------------------------------------
81 81 # Module level variables
82 82 #-----------------------------------------------------------------------------
83 83
84 84
85 85 _description = """Start the IPython controller for parallel computing.
86 86
87 87 The IPython controller provides a gateway between the IPython engines and
88 88 clients. The controller needs to be started before the engines and can be
89 89 configured using command line options or using a cluster directory. Cluster
90 90 directories contain config, log and security files and are usually located in
91 91 your ipython directory and named as "profile_name". See the `profile`
92 92 and `profile-dir` options for details.
93 93 """
94 94
95 95 _examples = """
96 96 ipcontroller --ip=192.168.0.1 --port=1000 # listen on ip, port for engines
97 97 ipcontroller --scheme=pure # use the pure zeromq scheduler
98 98 """
99 99
100 100
101 101 #-----------------------------------------------------------------------------
102 102 # The main application
103 103 #-----------------------------------------------------------------------------
104 104 flags = {}
105 105 flags.update(base_flags)
106 106 flags.update({
107 107 'usethreads' : ( {'IPControllerApp' : {'use_threads' : True}},
108 108 'Use threads instead of processes for the schedulers'),
109 109 'sqlitedb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.sqlitedb.SQLiteDB'}},
110 110 'use the SQLiteDB backend'),
111 111 'mongodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.mongodb.MongoDB'}},
112 112 'use the MongoDB backend'),
113 113 'dictdb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.DictDB'}},
114 114 'use the in-memory DictDB backend'),
115 115 'nodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.NoDB'}},
116 116 """use dummy DB backend, which doesn't store any information.
117 117
118 118 This is the default as of IPython 0.13.
119 119
120 120 To enable delayed or repeated retrieval of results from the Hub,
121 121 select one of the true db backends.
122 122 """),
123 123 'reuse' : ({'IPControllerApp' : {'reuse_files' : True}},
124 124 'reuse existing json connection files'),
125 125 'restore' : ({'IPControllerApp' : {'restore_engines' : True, 'reuse_files' : True}},
126 126 'Attempt to restore engines from a JSON file. '
127 127 'For use when resuming a crashed controller'),
128 128 })
129 129
130 130 flags.update(session_flags)
131 131
132 132 aliases = dict(
133 133 ssh = 'IPControllerApp.ssh_server',
134 134 enginessh = 'IPControllerApp.engine_ssh_server',
135 135 location = 'IPControllerApp.location',
136 136
137 137 url = 'HubFactory.url',
138 138 ip = 'HubFactory.ip',
139 139 transport = 'HubFactory.transport',
140 140 port = 'HubFactory.regport',
141 141
142 142 ping = 'HeartMonitor.period',
143 143
144 144 scheme = 'TaskScheduler.scheme_name',
145 145 hwm = 'TaskScheduler.hwm',
146 146 )
147 147 aliases.update(base_aliases)
148 148 aliases.update(session_aliases)
149 149
150 150 class IPControllerApp(BaseParallelApplication):
151 151
152 152 name = u'ipcontroller'
153 153 description = _description
154 154 examples = _examples
155 155 classes = [ProfileDir, Session, HubFactory, TaskScheduler, HeartMonitor, DictDB] + real_dbs
156 156
157 157 # change default to True
158 158 auto_create = Bool(True, config=True,
159 159 help="""Whether to create profile dir if it doesn't exist.""")
160 160
161 161 reuse_files = Bool(False, config=True,
162 162 help="""Whether to reuse existing json connection files.
163 163 If False, connection files will be removed on a clean exit.
164 164 """
165 165 )
166 166 restore_engines = Bool(False, config=True,
167 167 help="""Reload engine state from JSON file
168 168 """
169 169 )
170 170 ssh_server = Unicode(u'', config=True,
171 171 help="""ssh url for clients to use when connecting to the Controller
172 172 processes. It should be of the form: [user@]server[:port]. The
173 173 Controller's listening addresses must be accessible from the ssh server""",
174 174 )
175 175 engine_ssh_server = Unicode(u'', config=True,
176 176 help="""ssh url for engines to use when connecting to the Controller
177 177 processes. It should be of the form: [user@]server[:port]. The
178 178 Controller's listening addresses must be accessible from the ssh server""",
179 179 )
180 180 location = Unicode(u'', config=True,
181 181 help="""The external IP or domain name of the Controller, used for disambiguating
182 182 engine and client connections.""",
183 183 )
184 184 import_statements = List([], config=True,
185 185 help="import statements to be run at startup. Necessary in some environments"
186 186 )
187 187
188 188 use_threads = Bool(False, config=True,
189 189 help='Use threads instead of processes for the schedulers',
190 190 )
191 191
192 192 engine_json_file = Unicode('ipcontroller-engine.json', config=True,
193 193 help="JSON filename where engine connection info will be stored.")
194 194 client_json_file = Unicode('ipcontroller-client.json', config=True,
195 195 help="JSON filename where client connection info will be stored.")
196 196
197 197 def _cluster_id_changed(self, name, old, new):
198 198 super(IPControllerApp, self)._cluster_id_changed(name, old, new)
199 199 self.engine_json_file = "%s-engine.json" % self.name
200 200 self.client_json_file = "%s-client.json" % self.name
201 201
202 202
203 203 # internal
204 204 children = List()
205 205 mq_class = Unicode('zmq.devices.ProcessMonitoredQueue')
206 206
207 207 def _use_threads_changed(self, name, old, new):
208 208 self.mq_class = 'zmq.devices.%sMonitoredQueue'%('Thread' if new else 'Process')
209 209
210 210 write_connection_files = Bool(True,
211 211 help="""Whether to write connection files to disk.
212 212 True in all cases other than runs with `reuse_files=True` *after the first*
213 213 """
214 214 )
215 215
216 216 aliases = Dict(aliases)
217 217 flags = Dict(flags)
218 218
219 219
220 220 def save_connection_dict(self, fname, cdict):
221 221 """save a connection dict to json file."""
222 222 c = self.config
223 223 url = cdict['registration']
224 224 location = cdict['location']
225 225
226 226 if not location:
227 227 if public_ips():
228 228 location = public_ips()[-1]
229 229 else:
230 230 self.log.warn("Could not identify this machine's IP, assuming %s."
231 231 " You may need to specify '--location=<external_ip_address>' to help"
232 232 " IPython decide when to connect via loopback." % localhost() )
233 233 location = localhost()
234 234 cdict['location'] = location
235 235 fname = os.path.join(self.profile_dir.security_dir, fname)
236 236 self.log.info("writing connection info to %s", fname)
237 237 with open(fname, 'w') as f:
238 238 f.write(json.dumps(cdict, indent=2))
239 239 os.chmod(fname, stat.S_IRUSR|stat.S_IWUSR)
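For orientation, a hypothetical (abridged) client connection dict as written by this method; every port, address, and the key below are illustrative:

    {
      "ssh": "",
      "interface": "tcp://127.0.0.1",
      "registration": 12345,
      "control": 12346,
      "mux": 12347,
      "task": 12348,
      "iopub": 12349,
      "notification": 12350,
      "key": "a0436f6c-1916-498b-8eb9-e81ab9368e84",
      "location": "10.0.0.2",
      "pack": "json",
      "unpack": "json",
      "signature_scheme": "hmac-sha256"
    }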
240 240
241 241 def load_config_from_json(self):
242 242 """load config from existing json connector files."""
243 243 c = self.config
244 244 self.log.debug("loading config from JSON")
245 245
246 246 # load engine config
247 247
248 248 fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
249 249 self.log.info("loading connection info from %s", fname)
250 250 with open(fname) as f:
251 251 ecfg = json.loads(f.read())
252 252
253 253 # json gives unicode, Session.key wants bytes
254 254 c.Session.key = ecfg['key'].encode('ascii')
255 255
256 256 xport,ip = ecfg['interface'].split('://')
257 257
258 258 c.HubFactory.engine_ip = ip
259 259 c.HubFactory.engine_transport = xport
260 260
261 261 self.location = ecfg['location']
262 262 if not self.engine_ssh_server:
263 263 self.engine_ssh_server = ecfg['ssh']
264 264
265 265 # load client config
266 266
267 267 fname = os.path.join(self.profile_dir.security_dir, self.client_json_file)
268 268 self.log.info("loading connection info from %s", fname)
269 269 with open(fname) as f:
270 270 ccfg = json.loads(f.read())
271 271
272 272 for key in ('key', 'registration', 'pack', 'unpack', 'signature_scheme'):
273 273 assert ccfg[key] == ecfg[key], "mismatch between engine and client info: %r" % key
274 274
275 275 xport,addr = ccfg['interface'].split('://')
276 276
277 277 c.HubFactory.client_transport = xport
278 278 c.HubFactory.client_ip = addr
279 279 if not self.ssh_server:
280 280 self.ssh_server = ccfg['ssh']
281 281
282 282 # load port config:
283 283 c.HubFactory.regport = ecfg['registration']
284 284 c.HubFactory.hb = (ecfg['hb_ping'], ecfg['hb_pong'])
285 285 c.HubFactory.control = (ccfg['control'], ecfg['control'])
286 286 c.HubFactory.mux = (ccfg['mux'], ecfg['mux'])
287 287 c.HubFactory.task = (ccfg['task'], ecfg['task'])
288 288 c.HubFactory.iopub = (ccfg['iopub'], ecfg['iopub'])
289 289 c.HubFactory.notifier_port = ccfg['notification']
290 290
291 291 def cleanup_connection_files(self):
292 292 if self.reuse_files:
293 293 self.log.debug("leaving JSON connection files for reuse")
294 294 return
295 295 self.log.debug("cleaning up JSON connection files")
296 296 for f in (self.client_json_file, self.engine_json_file):
297 297 f = os.path.join(self.profile_dir.security_dir, f)
298 298 try:
299 299 os.remove(f)
300 300 except Exception as e:
301 301 self.log.error("Failed to cleanup connection file: %s", e)
302 302 else:
303 303 self.log.debug(u"removed %s", f)
304 304
305 305 def load_secondary_config(self):
306 306 """secondary config, loading from JSON and setting defaults"""
307 307 if self.reuse_files:
308 308 try:
309 309 self.load_config_from_json()
310 310 except (AssertionError,IOError) as e:
311 311 self.log.error("Could not load config from JSON: %s" % e)
312 312 else:
313 313 # successfully loaded config from JSON, and reuse=True
314 314 # no need to write back the same file
315 315 self.write_connection_files = False
316 316
317 # switch Session.key default to secure
318 default_secure(self.config)
319 317 self.log.debug("Config changed")
320 318 self.log.debug(repr(self.config))
321 319
322 320 def init_hub(self):
323 321 c = self.config
324 322
325 323 self.do_import_statements()
326 324
327 325 try:
328 326 self.factory = HubFactory(config=c, log=self.log)
329 327 # self.start_logging()
330 328 self.factory.init_hub()
331 329 except TraitError:
332 330 raise
333 331 except Exception:
334 332 self.log.error("Couldn't construct the Controller", exc_info=True)
335 333 self.exit(1)
336 334
337 335 if self.write_connection_files:
338 336 # save to new json config files
339 337 f = self.factory
340 338 base = {
341 339 'key' : f.session.key.decode('ascii'),
342 340 'location' : self.location,
343 341 'pack' : f.session.packer,
344 342 'unpack' : f.session.unpacker,
345 343 'signature_scheme' : f.session.signature_scheme,
346 344 }
347 345
348 346 cdict = {'ssh' : self.ssh_server}
349 347 cdict.update(f.client_info)
350 348 cdict.update(base)
351 349 self.save_connection_dict(self.client_json_file, cdict)
352 350
353 351 edict = {'ssh' : self.engine_ssh_server}
354 352 edict.update(f.engine_info)
355 353 edict.update(base)
356 354 self.save_connection_dict(self.engine_json_file, edict)
357 355
358 356 fname = "engines%s.json" % self.cluster_id
359 357 self.factory.hub.engine_state_file = os.path.join(self.profile_dir.log_dir, fname)
360 358 if self.restore_engines:
361 359 self.factory.hub._load_engine_state()
362 360
363 361 def init_schedulers(self):
364 362 children = self.children
365 363 mq = import_item(str(self.mq_class))
366 364
367 365 f = self.factory
368 366 ident = f.session.bsession
369 367 # disambiguate url, in case of *
370 368 monitor_url = disambiguate_url(f.monitor_url)
371 369 # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
372 370 # IOPub relay (in a Process)
373 371 q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A',b'iopub')
374 372 q.bind_in(f.client_url('iopub'))
375 373 q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
376 374 q.bind_out(f.engine_url('iopub'))
377 375 q.setsockopt_out(zmq.SUBSCRIBE, b'')
378 376 q.connect_mon(monitor_url)
379 377 q.daemon=True
380 378 children.append(q)
381 379
382 380 # Multiplexer Queue (in a Process)
383 381 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
384 382
385 383 q.bind_in(f.client_url('mux'))
386 384 q.setsockopt_in(zmq.IDENTITY, b'mux_in')
387 385 q.bind_out(f.engine_url('mux'))
388 386 q.setsockopt_out(zmq.IDENTITY, b'mux_out')
389 387 q.connect_mon(monitor_url)
390 388 q.daemon=True
391 389 children.append(q)
392 390
393 391 # Control Queue (in a Process)
394 392 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
395 393 q.bind_in(f.client_url('control'))
396 394 q.setsockopt_in(zmq.IDENTITY, b'control_in')
397 395 q.bind_out(f.engine_url('control'))
398 396 q.setsockopt_out(zmq.IDENTITY, b'control_out')
399 397 q.connect_mon(monitor_url)
400 398 q.daemon=True
401 399 children.append(q)
402 400 if 'TaskScheduler.scheme_name' in self.config:
403 401 scheme = self.config.TaskScheduler.scheme_name
404 402 else:
405 403 scheme = TaskScheduler.scheme_name.get_default_value()
406 404 # Task Queue (in a Process)
407 405 if scheme == 'pure':
408 406 self.log.warn("task::using pure DEALER Task scheduler")
409 407 q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
410 408 # q.setsockopt_out(zmq.HWM, hub.hwm)
411 409 q.bind_in(f.client_url('task'))
412 410 q.setsockopt_in(zmq.IDENTITY, b'task_in')
413 411 q.bind_out(f.engine_url('task'))
414 412 q.setsockopt_out(zmq.IDENTITY, b'task_out')
415 413 q.connect_mon(monitor_url)
416 414 q.daemon=True
417 415 children.append(q)
418 416 elif scheme == 'none':
419 417 self.log.warn("task::using no Task scheduler")
420 418
421 419 else:
422 420 self.log.info("task::using Python %s Task scheduler"%scheme)
423 421 sargs = (f.client_url('task'), f.engine_url('task'),
424 422 monitor_url, disambiguate_url(f.client_url('notification')),
425 423 disambiguate_url(f.client_url('registration')),
426 424 )
427 425 kwargs = dict(logname='scheduler', loglevel=self.log_level,
428 426 log_url = self.log_url, config=dict(self.config))
429 427 if 'Process' in self.mq_class:
430 428 # run the Python scheduler in a Process
431 429 q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
432 430 q.daemon=True
433 431 children.append(q)
434 432 else:
435 433 # single-threaded Controller
436 434 kwargs['in_thread'] = True
437 435 launch_scheduler(*sargs, **kwargs)
438 436
439 437 # set unlimited HWM for all relay devices
440 438 if hasattr(zmq, 'SNDHWM'):
441 439 q = children[0]
442 440 q.setsockopt_in(zmq.RCVHWM, 0)
443 441 q.setsockopt_out(zmq.SNDHWM, 0)
444 442
445 443 for q in children[1:]:
446 444 if not hasattr(q, 'setsockopt_in'):
447 445 continue
448 446 q.setsockopt_in(zmq.SNDHWM, 0)
449 447 q.setsockopt_in(zmq.RCVHWM, 0)
450 448 q.setsockopt_out(zmq.SNDHWM, 0)
451 449 q.setsockopt_out(zmq.RCVHWM, 0)
452 450 q.setsockopt_mon(zmq.SNDHWM, 0)
453 451
454 452
455 453 def terminate_children(self):
456 454 child_procs = []
457 455 for child in self.children:
458 456 if isinstance(child, ProcessMonitoredQueue):
459 457 child_procs.append(child.launcher)
460 458 elif isinstance(child, Process):
461 459 child_procs.append(child)
462 460 if child_procs:
463 461 self.log.critical("terminating children...")
464 462 for child in child_procs:
465 463 try:
466 464 child.terminate()
467 465 except OSError:
468 466 # already dead
469 467 pass
470 468
471 469 def handle_signal(self, sig, frame):
472 470 self.log.critical("Received signal %i, shutting down", sig)
473 471 self.terminate_children()
474 472 self.loop.stop()
475 473
476 474 def init_signal(self):
477 475 for sig in (SIGINT, SIGABRT, SIGTERM):
478 476 signal(sig, self.handle_signal)
479 477
480 478 def do_import_statements(self):
481 479 statements = self.import_statements
482 480 for s in statements:
483 481 try:
484 482 self.log.info("Executing statement: '%s'" % s)
485 483 exec(s, globals(), locals())
486 484 except Exception:
487 485 self.log.error("Error running statement: %s" % s)
488 486
489 487 def forward_logging(self):
490 488 if self.log_url:
491 489 self.log.info("Forwarding logging to %s"%self.log_url)
492 490 context = zmq.Context.instance()
493 491 lsock = context.socket(zmq.PUB)
494 492 lsock.connect(self.log_url)
495 493 handler = PUBHandler(lsock)
496 494 handler.root_topic = 'controller'
497 495 handler.setLevel(self.log_level)
498 496 self.log.addHandler(handler)
499 497
500 498 @catch_config_error
501 499 def initialize(self, argv=None):
502 500 super(IPControllerApp, self).initialize(argv)
503 501 self.forward_logging()
504 502 self.load_secondary_config()
505 503 self.init_hub()
506 504 self.init_schedulers()
507 505
508 506 def start(self):
509 507 # Start the subprocesses:
510 508 self.factory.start()
511 509 # children must be started before signals are setup,
512 510 # otherwise signal-handling will fire multiple times
513 511 for child in self.children:
514 512 child.start()
515 513 self.init_signal()
516 514
517 515 self.write_pid_file(overwrite=True)
518 516
519 517 try:
520 518 self.factory.loop.start()
521 519 except KeyboardInterrupt:
522 520 self.log.critical("Interrupted, Exiting...\n")
523 521 finally:
524 522 self.cleanup_connection_files()
525 523
526 524
527 525 def launch_new_instance(*args, **kwargs):
528 526 """Create and run the IPython controller"""
529 527 if sys.platform == 'win32':
530 528 # make sure we don't get called from a multiprocessing subprocess
531 529 # this can result in infinite Controllers being started on Windows
532 530 # which doesn't have a proper fork, so multiprocessing is wonky
533 531
534 532 # this only comes up when IPython has been installed using vanilla
535 533 # setuptools, and *not* distribute.
536 534 import multiprocessing
537 535 p = multiprocessing.current_process()
538 536 # the main process has name 'MainProcess'
539 537 # subprocesses will have names like 'Process-1'
540 538 if p.name != 'MainProcess':
541 539 # we are a subprocess, don't start another Controller!
542 540 return
543 541 return IPControllerApp.launch_instance(*args, **kwargs)
544 542
545 543
546 544 if __name__ == '__main__':
547 545 launch_new_instance()