##// END OF EJS Templates
Merge pull request #7832 from minrk/default-secure-session...
Matthias Bussonnier -
r20576:478a8026 merge
parent child Browse files
Show More
@@ -1,345 +1,344 b''
1 """ A minimal application base mixin for all ZMQ based IPython frontends.
1 """ A minimal application base mixin for all ZMQ based IPython frontends.
2
2
3 This is not a complete console app, as subprocess will not be able to receive
3 This is not a complete console app, as subprocess will not be able to receive
4 input, there is no real readline support, among other limitations. This is a
4 input, there is no real readline support, among other limitations. This is a
5 refactoring of what used to be the IPython/qt/console/qtconsoleapp.py
5 refactoring of what used to be the IPython/qt/console/qtconsoleapp.py
6 """
6 """
7 # Copyright (c) IPython Development Team.
7 # Copyright (c) IPython Development Team.
8 # Distributed under the terms of the Modified BSD License.
8 # Distributed under the terms of the Modified BSD License.
9
9
10 import atexit
10 import atexit
11 import os
11 import os
12 import signal
12 import signal
13 import sys
13 import sys
14 import uuid
14 import uuid
15
15
16
16
17 from IPython.config.application import boolean_flag
17 from IPython.config.application import boolean_flag
18 from IPython.core.profiledir import ProfileDir
18 from IPython.core.profiledir import ProfileDir
19 from IPython.kernel.blocking import BlockingKernelClient
19 from IPython.kernel.blocking import BlockingKernelClient
20 from IPython.kernel import KernelManager
20 from IPython.kernel import KernelManager
21 from IPython.kernel import tunnel_to_kernel, find_connection_file, swallow_argv
21 from IPython.kernel import tunnel_to_kernel, find_connection_file, swallow_argv
22 from IPython.kernel.kernelspec import NoSuchKernel
22 from IPython.kernel.kernelspec import NoSuchKernel
23 from IPython.utils.path import filefind
23 from IPython.utils.path import filefind
24 from IPython.utils.traitlets import (
24 from IPython.utils.traitlets import (
25 Dict, List, Unicode, CUnicode, CBool, Any
25 Dict, List, Unicode, CUnicode, CBool, Any
26 )
26 )
27 from IPython.kernel.zmq.kernelapp import (
27 from IPython.kernel.zmq.kernelapp import (
28 kernel_flags,
28 kernel_flags,
29 kernel_aliases,
29 kernel_aliases,
30 IPKernelApp
30 IPKernelApp
31 )
31 )
32 from IPython.kernel.zmq.pylab.config import InlineBackend
32 from IPython.kernel.zmq.pylab.config import InlineBackend
33 from IPython.kernel.zmq.session import Session, default_secure
33 from IPython.kernel.zmq.session import Session
34 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
34 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
35 from IPython.kernel.connect import ConnectionFileMixin
35 from IPython.kernel.connect import ConnectionFileMixin
36
36
37 from IPython.utils.localinterfaces import localhost
37 from IPython.utils.localinterfaces import localhost
38
38
39 #-----------------------------------------------------------------------------
39 #-----------------------------------------------------------------------------
40 # Aliases and Flags
40 # Aliases and Flags
41 #-----------------------------------------------------------------------------
41 #-----------------------------------------------------------------------------
42
42
# Start from the kernel's own flags, then layer on frontend-specific ones.
flags = dict(kernel_flags)

# the flags that are specific to the frontend
# these must be scrubbed before being passed to the kernel,
# or it will raise an error on unrecognized flags
app_flags = {
    'existing' : ({'IPythonConsoleApp' : {'existing' : 'kernel*.json'}},
            "Connect to an existing kernel. If no argument specified, guess most recent"),
}
app_flags.update(boolean_flag(
    'confirm-exit', 'IPythonConsoleApp.confirm_exit',
    """Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
       to force a direct exit without any confirmation.
    """,
    """Don't prompt the user when exiting. This will terminate the kernel
       if it is owned by the frontend, and leave it alive if it is external.
    """
))
flags.update(app_flags)

# Same pattern for aliases: kernel aliases plus frontend-only ones.
aliases = dict(kernel_aliases)

# also scrub aliases from the frontend
app_aliases = dict(
    ip = 'IPythonConsoleApp.ip',
    transport = 'IPythonConsoleApp.transport',
    hb = 'IPythonConsoleApp.hb_port',
    shell = 'IPythonConsoleApp.shell_port',
    iopub = 'IPythonConsoleApp.iopub_port',
    stdin = 'IPythonConsoleApp.stdin_port',
    existing = 'IPythonConsoleApp.existing',
    f = 'IPythonConsoleApp.connection_file',

    kernel = 'IPythonConsoleApp.kernel_name',

    ssh = 'IPythonConsoleApp.sshserver',
)
aliases.update(app_aliases)

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------

# Configurable classes exposed on the command line / config files.
classes = [KernelManager, ProfileDir, Session]
87
87
class IPythonConsoleApp(ConnectionFileMixin):
    """Mixin supplying the kernel plumbing shared by ZMQ console frontends.

    Handles connection-file discovery, optional SSH tunnelling, and
    construction of the kernel manager and kernel client.
    """
    name = 'ipython-console-mixin'

    description = """
        The IPython Mixin Console.

        This class contains the common portions of console client (QtConsole,
        ZMQ-based terminal console, etc).  It is not a full console, in that
        launched terminal subprocesses will not be able to accept input.

        The Console using this mixing supports various extra features beyond
        the single-process Terminal IPython shell, such as connecting to
        existing kernel, via:

            ipython <appname> --existing

        as well as tunnel via SSH

    """

    classes = classes
    flags = Dict(flags)
    aliases = Dict(aliases)
    kernel_manager_class = KernelManager
    kernel_client_class = BlockingKernelClient

    kernel_argv = List(Unicode)
    # frontend flags&aliases to be stripped when building kernel_argv
    frontend_flags = Any(app_flags)
    frontend_aliases = Any(app_aliases)

    # create requested profiles by default, if they don't exist:
    auto_create = CBool(True)
    # connection info:

    sshserver = Unicode('', config=True,
        help="""The SSH server to use to connect to the kernel.""")
    sshkey = Unicode('', config=True,
        help="""Path to the ssh key to use for logging in to the ssh server.""")

    def _connection_file_default(self):
        # dynamic default: one connection file per frontend process
        return 'kernel-%i.json' % os.getpid()

    existing = CUnicode('', config=True,
        help="""Connect to an already running kernel""")

    kernel_name = Unicode('python', config=True,
        help="""The name of the default kernel to start.""")

    confirm_exit = CBool(True, config=True,
        help="""
        Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
        to force a direct exit without any confirmation.""",
    )

    @property
    def help_classes(self):
        """ConsoleApps can configure kernels on the command-line

        But this shouldn't be written to a file
        """
        return self.classes + [IPKernelApp] + IPKernelApp.classes

    def build_kernel_argv(self, argv=None):
        """build argv to be passed to kernel subprocess"""
        argv = sys.argv[1:] if argv is None else argv
        # strip frontend-only flags/aliases, which the kernel would reject
        self.kernel_argv = swallow_argv(argv, self.frontend_aliases, self.frontend_flags)

    def init_connection_file(self):
        """find the connection file, and load the info if found.

        The current working directory and the current profile's security
        directory will be searched for the file if it is not given by
        absolute path.

        When attempting to connect to an existing kernel and the `--existing`
        argument does not match an existing file, it will be interpreted as a
        fileglob, and the matching file in the current profile's security dir
        with the latest access time will be used.

        After this method is called, self.connection_file contains the *full path*
        to the connection file, never just its name.
        """
        if self.existing:
            try:
                cf = find_connection_file(self.existing)
            except Exception:
                self.log.critical("Could not find existing kernel connection file %s", self.existing)
                self.exit(1)
            self.log.debug("Connecting to existing kernel: %s" % cf)
            self.connection_file = cf
        else:
            # not existing, check if we are going to write the file
            # and ensure that self.connection_file is a full path, not just the shortname
            try:
                cf = find_connection_file(self.connection_file)
            except Exception:
                # file might not exist
                if self.connection_file == os.path.basename(self.connection_file):
                    # just shortname, put it in security dir
                    cf = os.path.join(self.profile_dir.security_dir, self.connection_file)
                else:
                    cf = self.connection_file
            self.connection_file = cf
        try:
            self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
        except IOError:
            self.log.debug("Connection File not found: %s", self.connection_file)
            return

        # should load_connection_file only be used for existing?
        # as it is now, this allows reusing ports if an existing
        # file is requested
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
            self.exit(1)

    def init_ssh(self):
        """set up ssh tunnels, if needed."""
        # only tunnel when connecting to an existing kernel AND ssh was requested
        if not self.existing or (not self.sshserver and not self.sshkey):
            return
        self.load_connection_file()

        transport = self.transport
        ip = self.ip

        if transport != 'tcp':
            self.log.error("Can only use ssh tunnels with TCP sockets, not %s", transport)
            sys.exit(-1)

        if self.sshkey and not self.sshserver:
            # specifying just the key implies that we are connecting directly
            self.sshserver = ip
            ip = localhost()

        # build connection dict for tunnels:
        info = dict(ip=ip,
                    shell_port=self.shell_port,
                    iopub_port=self.iopub_port,
                    stdin_port=self.stdin_port,
                    hb_port=self.hb_port
        )

        self.log.info("Forwarding connections to %s via %s"%(ip, self.sshserver))

        # tunnels return a new set of ports, which will be on localhost:
        self.ip = localhost()
        try:
            newports = tunnel_to_kernel(info, self.sshserver, self.sshkey)
        except:
            # even catch KeyboardInterrupt
            self.log.error("Could not setup tunnels", exc_info=True)
            self.exit(1)

        self.shell_port, self.iopub_port, self.stdin_port, self.hb_port = newports

        # record a distinct '-ssh' connection file name for other clients
        cf = self.connection_file
        base,ext = os.path.splitext(cf)
        base = os.path.basename(base)
        self.connection_file = os.path.basename(base)+'-ssh'+ext
        self.log.info("To connect another client via this tunnel, use:")
        self.log.info("--existing %s" % self.connection_file)

    def _new_connection_file(self):
        # pick an unused kernel-<ident>.json path in the profile security dir
        cf = ''
        while not cf:
            # we don't need a 128b id to distinguish kernels, use more readable
            # 48b node segment (12 hex chars). Users running more than 32k simultaneous
            # kernels can subclass.
            ident = str(uuid.uuid4()).split('-')[-1]
            cf = os.path.join(self.profile_dir.security_dir, 'kernel-%s.json' % ident)
            # only keep if it's actually new. Protect against unlikely collision
            # in 48b random search space
            cf = cf if not os.path.exists(cf) else ''
        return cf

    def init_kernel_manager(self):
        # Don't let Qt or ZMQ swallow KeyboardInterupts.
        if self.existing:
            # connecting to an external kernel: nothing to manage
            self.kernel_manager = None
            return
        signal.signal(signal.SIGINT, signal.SIG_DFL)

        # Create a KernelManager and start a kernel.
        try:
            self.kernel_manager = self.kernel_manager_class(
                                    ip=self.ip,
                                    session=self.session,
                                    transport=self.transport,
                                    shell_port=self.shell_port,
                                    iopub_port=self.iopub_port,
                                    stdin_port=self.stdin_port,
                                    hb_port=self.hb_port,
                                    connection_file=self.connection_file,
                                    kernel_name=self.kernel_name,
                                    parent=self,
                                    ipython_dir=self.ipython_dir,
            )
        except NoSuchKernel:
            self.log.critical("Could not find kernel %s", self.kernel_name)
            self.exit(1)

        self.kernel_manager.client_factory = self.kernel_client_class
        # FIXME: remove special treatment of IPython kernels
        kwargs = {}
        if self.kernel_manager.ipython_kernel:
            kwargs['extra_arguments'] = self.kernel_argv
        self.kernel_manager.start_kernel(**kwargs)
        atexit.register(self.kernel_manager.cleanup_ipc_files)

        if self.sshserver:
            # ssh, write new connection file
            self.kernel_manager.write_connection_file()

        # in case KM defaults / ssh writing changes things:
        km = self.kernel_manager
        self.shell_port=km.shell_port
        self.iopub_port=km.iopub_port
        self.stdin_port=km.stdin_port
        self.hb_port=km.hb_port
        self.connection_file = km.connection_file

        atexit.register(self.kernel_manager.cleanup_connection_file)

    def init_kernel_client(self):
        # own kernel: ask the manager for a client; external: build one directly
        if self.kernel_manager is not None:
            self.kernel_client = self.kernel_manager.client()
        else:
            self.kernel_client = self.kernel_client_class(
                                session=self.session,
                                ip=self.ip,
                                transport=self.transport,
                                shell_port=self.shell_port,
                                iopub_port=self.iopub_port,
                                stdin_port=self.stdin_port,
                                hb_port=self.hb_port,
                                connection_file=self.connection_file,
                                parent=self,
            )

        self.kernel_client.start_channels()

    def initialize(self, argv=None):
        """
        Classes which mix this class in should call:
            IPythonConsoleApp.initialize(self,argv)
        """
        self.init_connection_file()
        self.init_ssh()
        self.init_kernel_manager()
        self.init_kernel_client()
@@ -1,1118 +1,1115 b''
1 # coding: utf-8
1 # coding: utf-8
2 """A tornado based IPython notebook server."""
2 """A tornado based IPython notebook server."""
3
3
4 # Copyright (c) IPython Development Team.
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
5 # Distributed under the terms of the Modified BSD License.
6
6
7 from __future__ import print_function
7 from __future__ import print_function
8
8
9 import base64
9 import base64
10 import datetime
10 import datetime
11 import errno
11 import errno
12 import importlib
12 import importlib
13 import io
13 import io
14 import json
14 import json
15 import logging
15 import logging
16 import os
16 import os
17 import random
17 import random
18 import re
18 import re
19 import select
19 import select
20 import signal
20 import signal
21 import socket
21 import socket
22 import sys
22 import sys
23 import threading
23 import threading
24 import webbrowser
24 import webbrowser
25
25
26
26
27 # check for pyzmq
27 # check for pyzmq
28 from IPython.utils.zmqrelated import check_for_zmq
28 from IPython.utils.zmqrelated import check_for_zmq
29 check_for_zmq('13', 'IPython.html')
29 check_for_zmq('13', 'IPython.html')
30
30
31 from jinja2 import Environment, FileSystemLoader
31 from jinja2 import Environment, FileSystemLoader
32
32
33 # Install the pyzmq ioloop. This has to be done before anything else from
33 # Install the pyzmq ioloop. This has to be done before anything else from
34 # tornado is imported.
34 # tornado is imported.
35 from zmq.eventloop import ioloop
35 from zmq.eventloop import ioloop
36 ioloop.install()
36 ioloop.install()
37
37
# check for tornado >= 4.0 (message below is the canonical requirement)
msg = "The IPython Notebook requires tornado >= 4.0"
try:
    import tornado
except ImportError:
    raise ImportError(msg)
try:
    # tornado.version_info appeared in 1.1.0; absence means a very old tornado
    version_info = tornado.version_info
except AttributeError:
    raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (4,0):
    raise ImportError(msg + ", but you have %s" % tornado.version)
50
50
51 from tornado import httpserver
51 from tornado import httpserver
52 from tornado import web
52 from tornado import web
53 from tornado.log import LogFormatter, app_log, access_log, gen_log
53 from tornado.log import LogFormatter, app_log, access_log, gen_log
54
54
55 from IPython.html import (
55 from IPython.html import (
56 DEFAULT_STATIC_FILES_PATH,
56 DEFAULT_STATIC_FILES_PATH,
57 DEFAULT_TEMPLATE_PATH_LIST,
57 DEFAULT_TEMPLATE_PATH_LIST,
58 )
58 )
59 from .base.handlers import Template404
59 from .base.handlers import Template404
60 from .log import log_request
60 from .log import log_request
61 from .services.kernels.kernelmanager import MappingKernelManager
61 from .services.kernels.kernelmanager import MappingKernelManager
62 from .services.config import ConfigManager
62 from .services.config import ConfigManager
63 from .services.contents.manager import ContentsManager
63 from .services.contents.manager import ContentsManager
64 from .services.contents.filemanager import FileContentsManager
64 from .services.contents.filemanager import FileContentsManager
65 from .services.clusters.clustermanager import ClusterManager
65 from .services.clusters.clustermanager import ClusterManager
66 from .services.sessions.sessionmanager import SessionManager
66 from .services.sessions.sessionmanager import SessionManager
67
67
68 from .auth.login import LoginHandler
68 from .auth.login import LoginHandler
69 from .auth.logout import LogoutHandler
69 from .auth.logout import LogoutHandler
70 from .base.handlers import IPythonHandler, FileFindHandler
70 from .base.handlers import IPythonHandler, FileFindHandler
71
71
72 from IPython.config import Config
72 from IPython.config import Config
73 from IPython.config.application import catch_config_error, boolean_flag
73 from IPython.config.application import catch_config_error, boolean_flag
74 from IPython.core.application import (
74 from IPython.core.application import (
75 BaseIPythonApplication, base_flags, base_aliases,
75 BaseIPythonApplication, base_flags, base_aliases,
76 )
76 )
77 from IPython.core.profiledir import ProfileDir
77 from IPython.core.profiledir import ProfileDir
78 from IPython.kernel import KernelManager
78 from IPython.kernel import KernelManager
79 from IPython.kernel.kernelspec import KernelSpecManager
79 from IPython.kernel.kernelspec import KernelSpecManager
80 from IPython.kernel.zmq.session import default_secure, Session
80 from IPython.kernel.zmq.session import Session
81 from IPython.nbformat.sign import NotebookNotary
81 from IPython.nbformat.sign import NotebookNotary
82 from IPython.utils.importstring import import_item
82 from IPython.utils.importstring import import_item
83 from IPython.utils import submodule
83 from IPython.utils import submodule
84 from IPython.utils.process import check_pid
84 from IPython.utils.process import check_pid
85 from IPython.utils.traitlets import (
85 from IPython.utils.traitlets import (
86 Dict, Unicode, Integer, List, Bool, Bytes, Instance,
86 Dict, Unicode, Integer, List, Bool, Bytes, Instance,
87 TraitError, Type,
87 TraitError, Type,
88 )
88 )
89 from IPython.utils import py3compat
89 from IPython.utils import py3compat
90 from IPython.utils.path import filefind, get_ipython_dir
90 from IPython.utils.path import filefind, get_ipython_dir
91 from IPython.utils.sysinfo import get_sys_info
91 from IPython.utils.sysinfo import get_sys_info
92
92
93 from .nbextensions import SYSTEM_NBEXTENSIONS_DIRS
93 from .nbextensions import SYSTEM_NBEXTENSIONS_DIRS
94 from .utils import url_path_join
94 from .utils import url_path_join
95
95
96 #-----------------------------------------------------------------------------
96 #-----------------------------------------------------------------------------
97 # Module globals
97 # Module globals
98 #-----------------------------------------------------------------------------
98 #-----------------------------------------------------------------------------
99
99
100 _examples = """
100 _examples = """
101 ipython notebook # start the notebook
101 ipython notebook # start the notebook
102 ipython notebook --profile=sympy # use the sympy profile
102 ipython notebook --profile=sympy # use the sympy profile
103 ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
103 ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
104 """
104 """
105
105
106 #-----------------------------------------------------------------------------
106 #-----------------------------------------------------------------------------
107 # Helper functions
107 # Helper functions
108 #-----------------------------------------------------------------------------
108 #-----------------------------------------------------------------------------
109
109
def random_ports(port, n):
    """Generate a list of n random ports near the given port.

    The first 5 ports will be sequential, and the remaining n-5 will be
    randomly selected in the range [port-2*n, port+2*n].
    """
    sequential = min(5, n)
    # sequential candidates first, so the preferred port is tried first
    for offset in range(sequential):
        yield port + offset
    # then random candidates; clamp at 1 so we never yield port 0 or below
    for _ in range(n - sequential):
        yield max(1, port + random.randint(-2 * n, 2 * n))
120
120
def load_handlers(name):
    """Load the (URL pattern, handler) tuples for each component."""
    # resolve the dotted submodule under IPython.html and pull its handler table
    mod = __import__('IPython.html.' + name, fromlist=['default_handlers'])
    return mod.default_handlers
126
126
127 #-----------------------------------------------------------------------------
127 #-----------------------------------------------------------------------------
128 # The Tornado web application
128 # The Tornado web application
129 #-----------------------------------------------------------------------------
129 #-----------------------------------------------------------------------------
130
130
class NotebookWebApplication(web.Application):
    """The Tornado web application serving the notebook UI and REST APIs."""

    def __init__(self, ipython_app, kernel_manager, contents_manager,
                 cluster_manager, session_manager, kernel_spec_manager,
                 config_manager, log,
                 base_url, default_url, settings_overrides, jinja_env_options):
        # Build settings first, then the handler table (which reads settings).
        settings = self.init_settings(
            ipython_app, kernel_manager, contents_manager, cluster_manager,
            session_manager, kernel_spec_manager, config_manager, log, base_url,
            default_url, settings_overrides, jinja_env_options)
        handlers = self.init_handlers(settings)
        super(NotebookWebApplication, self).__init__(handlers, **settings)

    def init_settings(self, ipython_app, kernel_manager, contents_manager,
                      cluster_manager, session_manager, kernel_spec_manager,
                      config_manager,
                      log, base_url, default_url, settings_overrides,
                      jinja_env_options=None):
        """Assemble the tornado application settings dict.

        `settings_overrides` wins over everything computed here.
        """
        raw_template_path = settings_overrides.get(
            "template_path",
            ipython_app.template_file_path,
        )
        if isinstance(raw_template_path, str):
            # a single path was given; normalize to a sequence
            raw_template_path = (raw_template_path,)
        template_path = [os.path.expanduser(p) for p in raw_template_path]

        env = Environment(
            loader=FileSystemLoader(template_path),
            **(jinja_env_options or {})
        )

        sys_info = get_sys_info()
        if sys_info['commit_source'] == 'repository':
            # don't cache (rely on 304) when working from master
            version_hash = ''
        else:
            # reset the cache on server restart
            version_hash = datetime.datetime.now().strftime("%Y%m%d%H%M%S")

        settings = dict(
            # basics
            log_function=log_request,
            base_url=base_url,
            default_url=default_url,
            template_path=template_path,
            static_path=ipython_app.static_file_path,
            static_handler_class=FileFindHandler,
            static_url_prefix=url_path_join(base_url, '/static/'),
            static_handler_args={
                # don't cache custom.js
                'no_cache_paths': [url_path_join(base_url, 'static', 'custom')],
            },
            version_hash=version_hash,

            # authentication
            cookie_secret=ipython_app.cookie_secret,
            login_url=url_path_join(base_url, '/login'),
            login_handler_class=ipython_app.login_handler_class,
            logout_handler_class=ipython_app.logout_handler_class,
            password=ipython_app.password,

            # managers
            kernel_manager=kernel_manager,
            contents_manager=contents_manager,
            cluster_manager=cluster_manager,
            session_manager=session_manager,
            kernel_spec_manager=kernel_spec_manager,
            config_manager=config_manager,

            # IPython stuff
            nbextensions_path=ipython_app.nbextensions_path,
            websocket_url=ipython_app.websocket_url,
            mathjax_url=ipython_app.mathjax_url,
            config=ipython_app.config,
            jinja2_env=env,
            terminals_available=False,  # Set later if terminals are available
        )

        # allow custom overrides for the tornado web app.
        settings.update(settings_overrides)
        return settings

    def init_handlers(self, settings):
        """Load the (URL pattern, handler) tuples for each component."""
        # Order matters. The first handler to match the URL will handle the request.
        handlers = []
        handlers.extend(load_handlers('tree.handlers'))
        handlers.append((r"/login", settings['login_handler_class']))
        handlers.append((r"/logout", settings['logout_handler_class']))
        # Remaining components, in match-priority order.
        for component in (
            'files.handlers',
            'notebook.handlers',
            'nbconvert.handlers',
            'kernelspecs.handlers',
            'edit.handlers',
            'services.config.handlers',
            'services.kernels.handlers',
            'services.contents.handlers',
            'services.clusters.handlers',
            'services.sessions.handlers',
            'services.nbconvert.handlers',
            'services.kernelspecs.handlers',
            'services.security.handlers',
        ):
            handlers.extend(load_handlers(component))
        handlers.append(
            (r"/nbextensions/(.*)", FileFindHandler, {
                'path': settings['nbextensions_path'],
                'no_cache_paths': ['/'],  # don't cache anything in nbextensions
            }),
        )
        # register base handlers last
        handlers.extend(load_handlers('base.handlers'))
        # set the URL that will be redirected from `/`
        handlers.append(
            (r'/?', web.RedirectHandler, {
                'url': settings['default_url'],
                'permanent': False,  # want 302, not 301
            })
        )
        # prepend base_url onto the patterns that we match
        prefixed = []
        for handler in handlers:
            pattern = url_path_join(settings['base_url'], handler[0])
            prefixed.append(tuple([pattern] + list(handler[1:])))
        # add 404 on the end, which will catch everything that falls through
        prefixed.append((r'(.*)', Template404))
        return prefixed
259
259
260
260
class NbserverListApp(BaseIPythonApplication):
    """Subcommand application: list running notebook servers."""

    description = "List currently running notebook servers in this profile."

    flags = dict(
        json=({'NbserverListApp': {'json': True}},
              "Produce machine-readable JSON output."),
    )

    # When True, emit one JSON object per server instead of human text.
    json = Bool(False, config=True,
        help="If True, each line of output will be a JSON object with the "
             "details from the server info file.")

    def start(self):
        """Print one line per running server, as text or JSON."""
        as_json = self.json
        if not as_json:
            print("Currently running servers:")
        for serverinfo in list_running_servers(self.profile):
            if as_json:
                print(json.dumps(serverinfo))
            else:
                print(serverinfo['url'], "::", serverinfo['notebook_dir'])
282
282
283 #-----------------------------------------------------------------------------
283 #-----------------------------------------------------------------------------
284 # Aliases and Flags
284 # Aliases and Flags
285 #-----------------------------------------------------------------------------
285 #-----------------------------------------------------------------------------
286
286
# Start from the common application flags and layer notebook-specific ones on top.
flags = dict(base_flags)
flags['no-browser'] = (
    {'NotebookApp': {'open_browser': False}},
    "Don't open the notebook in a browser after startup."
)
flags['pylab'] = (
    {'NotebookApp': {'pylab': 'warn'}},
    "DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib."
)
flags['no-mathjax'] = (
    {'NotebookApp': {'enable_mathjax': False}},
    """Disable MathJax
    
    MathJax is the javascript library IPython uses to render math/LaTeX. It is
    very large, so you may want to disable it if you have a slow internet
    connection, or for offline use of the notebook.
    
    When disabled, equations etc. will appear as their untransformed TeX source.
    """
)

# Add notebook manager flags
flags.update(boolean_flag('script', 'FileContentsManager.save_script',
                          'DEPRECATED, IGNORED',
                          'DEPRECATED, IGNORED'))

# Start from the common aliases and add notebook-specific ones.
aliases = dict(base_aliases)
aliases.update({
    'ip': 'NotebookApp.ip',
    'port': 'NotebookApp.port',
    'port-retries': 'NotebookApp.port_retries',
    'transport': 'KernelManager.transport',
    'keyfile': 'NotebookApp.keyfile',
    'certfile': 'NotebookApp.certfile',
    'notebook-dir': 'NotebookApp.notebook_dir',
    'browser': 'NotebookApp.browser',
    'pylab': 'NotebookApp.pylab',
})
326
326
327 #-----------------------------------------------------------------------------
327 #-----------------------------------------------------------------------------
328 # NotebookApp
328 # NotebookApp
329 #-----------------------------------------------------------------------------
329 #-----------------------------------------------------------------------------
330
330
331 class NotebookApp(BaseIPythonApplication):
331 class NotebookApp(BaseIPythonApplication):
332
332
333 name = 'ipython-notebook'
333 name = 'ipython-notebook'
334
334
335 description = """
335 description = """
336 The IPython HTML Notebook.
336 The IPython HTML Notebook.
337
337
338 This launches a Tornado based HTML Notebook Server that serves up an
338 This launches a Tornado based HTML Notebook Server that serves up an
339 HTML5/Javascript Notebook client.
339 HTML5/Javascript Notebook client.
340 """
340 """
341 examples = _examples
341 examples = _examples
342 aliases = aliases
342 aliases = aliases
343 flags = flags
343 flags = flags
344
344
345 classes = [
345 classes = [
346 KernelManager, ProfileDir, Session, MappingKernelManager,
346 KernelManager, ProfileDir, Session, MappingKernelManager,
347 ContentsManager, FileContentsManager, NotebookNotary,
347 ContentsManager, FileContentsManager, NotebookNotary,
348 KernelSpecManager,
348 KernelSpecManager,
349 ]
349 ]
350 flags = Dict(flags)
350 flags = Dict(flags)
351 aliases = Dict(aliases)
351 aliases = Dict(aliases)
352
352
353 subcommands = dict(
353 subcommands = dict(
354 list=(NbserverListApp, NbserverListApp.description.splitlines()[0]),
354 list=(NbserverListApp, NbserverListApp.description.splitlines()[0]),
355 )
355 )
356
356
357 ipython_kernel_argv = List(Unicode)
357 ipython_kernel_argv = List(Unicode)
358
358
359 _log_formatter_cls = LogFormatter
359 _log_formatter_cls = LogFormatter
360
360
361 def _log_level_default(self):
361 def _log_level_default(self):
362 return logging.INFO
362 return logging.INFO
363
363
364 def _log_datefmt_default(self):
364 def _log_datefmt_default(self):
365 """Exclude date from default date format"""
365 """Exclude date from default date format"""
366 return "%H:%M:%S"
366 return "%H:%M:%S"
367
367
368 def _log_format_default(self):
368 def _log_format_default(self):
369 """override default log format to include time"""
369 """override default log format to include time"""
370 return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s"
370 return u"%(color)s[%(levelname)1.1s %(asctime)s.%(msecs).03d %(name)s]%(end_color)s %(message)s"
371
371
372 # create requested profiles by default, if they don't exist:
372 # create requested profiles by default, if they don't exist:
373 auto_create = Bool(True)
373 auto_create = Bool(True)
374
374
375 # file to be opened in the notebook server
375 # file to be opened in the notebook server
376 file_to_run = Unicode('', config=True)
376 file_to_run = Unicode('', config=True)
377
377
378 # Network related information
378 # Network related information
379
379
380 allow_origin = Unicode('', config=True,
380 allow_origin = Unicode('', config=True,
381 help="""Set the Access-Control-Allow-Origin header
381 help="""Set the Access-Control-Allow-Origin header
382
382
383 Use '*' to allow any origin to access your server.
383 Use '*' to allow any origin to access your server.
384
384
385 Takes precedence over allow_origin_pat.
385 Takes precedence over allow_origin_pat.
386 """
386 """
387 )
387 )
388
388
389 allow_origin_pat = Unicode('', config=True,
389 allow_origin_pat = Unicode('', config=True,
390 help="""Use a regular expression for the Access-Control-Allow-Origin header
390 help="""Use a regular expression for the Access-Control-Allow-Origin header
391
391
392 Requests from an origin matching the expression will get replies with:
392 Requests from an origin matching the expression will get replies with:
393
393
394 Access-Control-Allow-Origin: origin
394 Access-Control-Allow-Origin: origin
395
395
396 where `origin` is the origin of the request.
396 where `origin` is the origin of the request.
397
397
398 Ignored if allow_origin is set.
398 Ignored if allow_origin is set.
399 """
399 """
400 )
400 )
401
401
402 allow_credentials = Bool(False, config=True,
402 allow_credentials = Bool(False, config=True,
403 help="Set the Access-Control-Allow-Credentials: true header"
403 help="Set the Access-Control-Allow-Credentials: true header"
404 )
404 )
405
405
406 default_url = Unicode('/tree', config=True,
406 default_url = Unicode('/tree', config=True,
407 help="The default URL to redirect to from `/`"
407 help="The default URL to redirect to from `/`"
408 )
408 )
409
409
410 ip = Unicode('localhost', config=True,
410 ip = Unicode('localhost', config=True,
411 help="The IP address the notebook server will listen on."
411 help="The IP address the notebook server will listen on."
412 )
412 )
413
413
414 def _ip_changed(self, name, old, new):
414 def _ip_changed(self, name, old, new):
415 if new == u'*': self.ip = u''
415 if new == u'*': self.ip = u''
416
416
417 port = Integer(8888, config=True,
417 port = Integer(8888, config=True,
418 help="The port the notebook server will listen on."
418 help="The port the notebook server will listen on."
419 )
419 )
420 port_retries = Integer(50, config=True,
420 port_retries = Integer(50, config=True,
421 help="The number of additional ports to try if the specified port is not available."
421 help="The number of additional ports to try if the specified port is not available."
422 )
422 )
423
423
424 certfile = Unicode(u'', config=True,
424 certfile = Unicode(u'', config=True,
425 help="""The full path to an SSL/TLS certificate file."""
425 help="""The full path to an SSL/TLS certificate file."""
426 )
426 )
427
427
428 keyfile = Unicode(u'', config=True,
428 keyfile = Unicode(u'', config=True,
429 help="""The full path to a private key file for usage with SSL/TLS."""
429 help="""The full path to a private key file for usage with SSL/TLS."""
430 )
430 )
431
431
432 cookie_secret_file = Unicode(config=True,
432 cookie_secret_file = Unicode(config=True,
433 help="""The file where the cookie secret is stored."""
433 help="""The file where the cookie secret is stored."""
434 )
434 )
435 def _cookie_secret_file_default(self):
435 def _cookie_secret_file_default(self):
436 if self.profile_dir is None:
436 if self.profile_dir is None:
437 return ''
437 return ''
438 return os.path.join(self.profile_dir.security_dir, 'notebook_cookie_secret')
438 return os.path.join(self.profile_dir.security_dir, 'notebook_cookie_secret')
439
439
440 cookie_secret = Bytes(b'', config=True,
440 cookie_secret = Bytes(b'', config=True,
441 help="""The random bytes used to secure cookies.
441 help="""The random bytes used to secure cookies.
442 By default this is a new random number every time you start the Notebook.
442 By default this is a new random number every time you start the Notebook.
443 Set it to a value in a config file to enable logins to persist across server sessions.
443 Set it to a value in a config file to enable logins to persist across server sessions.
444
444
445 Note: Cookie secrets should be kept private, do not share config files with
445 Note: Cookie secrets should be kept private, do not share config files with
446 cookie_secret stored in plaintext (you can read the value from a file).
446 cookie_secret stored in plaintext (you can read the value from a file).
447 """
447 """
448 )
448 )
449 def _cookie_secret_default(self):
449 def _cookie_secret_default(self):
450 if os.path.exists(self.cookie_secret_file):
450 if os.path.exists(self.cookie_secret_file):
451 with io.open(self.cookie_secret_file, 'rb') as f:
451 with io.open(self.cookie_secret_file, 'rb') as f:
452 return f.read()
452 return f.read()
453 else:
453 else:
454 secret = base64.encodestring(os.urandom(1024))
454 secret = base64.encodestring(os.urandom(1024))
455 self._write_cookie_secret_file(secret)
455 self._write_cookie_secret_file(secret)
456 return secret
456 return secret
457
457
458 def _write_cookie_secret_file(self, secret):
458 def _write_cookie_secret_file(self, secret):
459 """write my secret to my secret_file"""
459 """write my secret to my secret_file"""
460 self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file)
460 self.log.info("Writing notebook server cookie secret to %s", self.cookie_secret_file)
461 with io.open(self.cookie_secret_file, 'wb') as f:
461 with io.open(self.cookie_secret_file, 'wb') as f:
462 f.write(secret)
462 f.write(secret)
463 try:
463 try:
464 os.chmod(self.cookie_secret_file, 0o600)
464 os.chmod(self.cookie_secret_file, 0o600)
465 except OSError:
465 except OSError:
466 self.log.warn(
466 self.log.warn(
467 "Could not set permissions on %s",
467 "Could not set permissions on %s",
468 self.cookie_secret_file
468 self.cookie_secret_file
469 )
469 )
470
470
471 password = Unicode(u'', config=True,
471 password = Unicode(u'', config=True,
472 help="""Hashed password to use for web authentication.
472 help="""Hashed password to use for web authentication.
473
473
474 To generate, type in a python/IPython shell:
474 To generate, type in a python/IPython shell:
475
475
476 from IPython.lib import passwd; passwd()
476 from IPython.lib import passwd; passwd()
477
477
478 The string should be of the form type:salt:hashed-password.
478 The string should be of the form type:salt:hashed-password.
479 """
479 """
480 )
480 )
481
481
482 open_browser = Bool(True, config=True,
482 open_browser = Bool(True, config=True,
483 help="""Whether to open in a browser after starting.
483 help="""Whether to open in a browser after starting.
484 The specific browser used is platform dependent and
484 The specific browser used is platform dependent and
485 determined by the python standard library `webbrowser`
485 determined by the python standard library `webbrowser`
486 module, unless it is overridden using the --browser
486 module, unless it is overridden using the --browser
487 (NotebookApp.browser) configuration option.
487 (NotebookApp.browser) configuration option.
488 """)
488 """)
489
489
490 browser = Unicode(u'', config=True,
490 browser = Unicode(u'', config=True,
491 help="""Specify what command to use to invoke a web
491 help="""Specify what command to use to invoke a web
492 browser when opening the notebook. If not specified, the
492 browser when opening the notebook. If not specified, the
493 default browser will be determined by the `webbrowser`
493 default browser will be determined by the `webbrowser`
494 standard library module, which allows setting of the
494 standard library module, which allows setting of the
495 BROWSER environment variable to override it.
495 BROWSER environment variable to override it.
496 """)
496 """)
497
497
498 webapp_settings = Dict(config=True,
498 webapp_settings = Dict(config=True,
499 help="DEPRECATED, use tornado_settings"
499 help="DEPRECATED, use tornado_settings"
500 )
500 )
501 def _webapp_settings_changed(self, name, old, new):
501 def _webapp_settings_changed(self, name, old, new):
502 self.log.warn("\n webapp_settings is deprecated, use tornado_settings.\n")
502 self.log.warn("\n webapp_settings is deprecated, use tornado_settings.\n")
503 self.tornado_settings = new
503 self.tornado_settings = new
504
504
505 tornado_settings = Dict(config=True,
505 tornado_settings = Dict(config=True,
506 help="Supply overrides for the tornado.web.Application that the "
506 help="Supply overrides for the tornado.web.Application that the "
507 "IPython notebook uses.")
507 "IPython notebook uses.")
508
508
509 ssl_options = Dict(config=True,
509 ssl_options = Dict(config=True,
510 help="""Supply SSL options for the tornado HTTPServer.
510 help="""Supply SSL options for the tornado HTTPServer.
511 See the tornado docs for details.""")
511 See the tornado docs for details.""")
512
512
513 jinja_environment_options = Dict(config=True,
513 jinja_environment_options = Dict(config=True,
514 help="Supply extra arguments that will be passed to Jinja environment.")
514 help="Supply extra arguments that will be passed to Jinja environment.")
515
515
516 enable_mathjax = Bool(True, config=True,
516 enable_mathjax = Bool(True, config=True,
517 help="""Whether to enable MathJax for typesetting math/TeX
517 help="""Whether to enable MathJax for typesetting math/TeX
518
518
519 MathJax is the javascript library IPython uses to render math/LaTeX. It is
519 MathJax is the javascript library IPython uses to render math/LaTeX. It is
520 very large, so you may want to disable it if you have a slow internet
520 very large, so you may want to disable it if you have a slow internet
521 connection, or for offline use of the notebook.
521 connection, or for offline use of the notebook.
522
522
523 When disabled, equations etc. will appear as their untransformed TeX source.
523 When disabled, equations etc. will appear as their untransformed TeX source.
524 """
524 """
525 )
525 )
526 def _enable_mathjax_changed(self, name, old, new):
526 def _enable_mathjax_changed(self, name, old, new):
527 """set mathjax url to empty if mathjax is disabled"""
527 """set mathjax url to empty if mathjax is disabled"""
528 if not new:
528 if not new:
529 self.mathjax_url = u''
529 self.mathjax_url = u''
530
530
531 base_url = Unicode('/', config=True,
531 base_url = Unicode('/', config=True,
532 help='''The base URL for the notebook server.
532 help='''The base URL for the notebook server.
533
533
534 Leading and trailing slashes can be omitted,
534 Leading and trailing slashes can be omitted,
535 and will automatically be added.
535 and will automatically be added.
536 ''')
536 ''')
537 def _base_url_changed(self, name, old, new):
537 def _base_url_changed(self, name, old, new):
538 if not new.startswith('/'):
538 if not new.startswith('/'):
539 self.base_url = '/'+new
539 self.base_url = '/'+new
540 elif not new.endswith('/'):
540 elif not new.endswith('/'):
541 self.base_url = new+'/'
541 self.base_url = new+'/'
542
542
543 base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""")
543 base_project_url = Unicode('/', config=True, help="""DEPRECATED use base_url""")
544 def _base_project_url_changed(self, name, old, new):
544 def _base_project_url_changed(self, name, old, new):
545 self.log.warn("base_project_url is deprecated, use base_url")
545 self.log.warn("base_project_url is deprecated, use base_url")
546 self.base_url = new
546 self.base_url = new
547
547
548 extra_static_paths = List(Unicode, config=True,
548 extra_static_paths = List(Unicode, config=True,
549 help="""Extra paths to search for serving static files.
549 help="""Extra paths to search for serving static files.
550
550
551 This allows adding javascript/css to be available from the notebook server machine,
551 This allows adding javascript/css to be available from the notebook server machine,
552 or overriding individual files in the IPython"""
552 or overriding individual files in the IPython"""
553 )
553 )
554 def _extra_static_paths_default(self):
554 def _extra_static_paths_default(self):
555 return [os.path.join(self.profile_dir.location, 'static')]
555 return [os.path.join(self.profile_dir.location, 'static')]
556
556
557 @property
557 @property
558 def static_file_path(self):
558 def static_file_path(self):
559 """return extra paths + the default location"""
559 """return extra paths + the default location"""
560 return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]
560 return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]
561
561
562 extra_template_paths = List(Unicode, config=True,
562 extra_template_paths = List(Unicode, config=True,
563 help="""Extra paths to search for serving jinja templates.
563 help="""Extra paths to search for serving jinja templates.
564
564
565 Can be used to override templates from IPython.html.templates."""
565 Can be used to override templates from IPython.html.templates."""
566 )
566 )
567 def _extra_template_paths_default(self):
567 def _extra_template_paths_default(self):
568 return []
568 return []
569
569
570 @property
570 @property
571 def template_file_path(self):
571 def template_file_path(self):
572 """return extra paths + the default locations"""
572 """return extra paths + the default locations"""
573 return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST
573 return self.extra_template_paths + DEFAULT_TEMPLATE_PATH_LIST
574
574
575 extra_nbextensions_path = List(Unicode, config=True,
575 extra_nbextensions_path = List(Unicode, config=True,
576 help="""extra paths to look for Javascript notebook extensions"""
576 help="""extra paths to look for Javascript notebook extensions"""
577 )
577 )
578
578
579 @property
579 @property
580 def nbextensions_path(self):
580 def nbextensions_path(self):
581 """The path to look for Javascript notebook extensions"""
581 """The path to look for Javascript notebook extensions"""
582 return self.extra_nbextensions_path + [os.path.join(get_ipython_dir(), 'nbextensions')] + SYSTEM_NBEXTENSIONS_DIRS
582 return self.extra_nbextensions_path + [os.path.join(get_ipython_dir(), 'nbextensions')] + SYSTEM_NBEXTENSIONS_DIRS
583
583
584 websocket_url = Unicode("", config=True,
584 websocket_url = Unicode("", config=True,
585 help="""The base URL for websockets,
585 help="""The base URL for websockets,
586 if it differs from the HTTP server (hint: it almost certainly doesn't).
586 if it differs from the HTTP server (hint: it almost certainly doesn't).
587
587
588 Should be in the form of an HTTP origin: ws[s]://hostname[:port]
588 Should be in the form of an HTTP origin: ws[s]://hostname[:port]
589 """
589 """
590 )
590 )
591 mathjax_url = Unicode("", config=True,
591 mathjax_url = Unicode("", config=True,
592 help="""The url for MathJax.js."""
592 help="""The url for MathJax.js."""
593 )
593 )
594 def _mathjax_url_default(self):
594 def _mathjax_url_default(self):
595 if not self.enable_mathjax:
595 if not self.enable_mathjax:
596 return u''
596 return u''
597 static_url_prefix = self.tornado_settings.get("static_url_prefix",
597 static_url_prefix = self.tornado_settings.get("static_url_prefix",
598 url_path_join(self.base_url, "static")
598 url_path_join(self.base_url, "static")
599 )
599 )
600
600
601 # try local mathjax, either in nbextensions/mathjax or static/mathjax
601 # try local mathjax, either in nbextensions/mathjax or static/mathjax
602 for (url_prefix, search_path) in [
602 for (url_prefix, search_path) in [
603 (url_path_join(self.base_url, "nbextensions"), self.nbextensions_path),
603 (url_path_join(self.base_url, "nbextensions"), self.nbextensions_path),
604 (static_url_prefix, self.static_file_path),
604 (static_url_prefix, self.static_file_path),
605 ]:
605 ]:
606 self.log.debug("searching for local mathjax in %s", search_path)
606 self.log.debug("searching for local mathjax in %s", search_path)
607 try:
607 try:
608 mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), search_path)
608 mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), search_path)
609 except IOError:
609 except IOError:
610 continue
610 continue
611 else:
611 else:
612 url = url_path_join(url_prefix, u"mathjax/MathJax.js")
612 url = url_path_join(url_prefix, u"mathjax/MathJax.js")
613 self.log.info("Serving local MathJax from %s at %s", mathjax, url)
613 self.log.info("Serving local MathJax from %s at %s", mathjax, url)
614 return url
614 return url
615
615
616 # no local mathjax, serve from CDN
616 # no local mathjax, serve from CDN
617 url = u"https://cdn.mathjax.org/mathjax/latest/MathJax.js"
617 url = u"https://cdn.mathjax.org/mathjax/latest/MathJax.js"
618 self.log.info("Using MathJax from CDN: %s", url)
618 self.log.info("Using MathJax from CDN: %s", url)
619 return url
619 return url
620
620
621 def _mathjax_url_changed(self, name, old, new):
621 def _mathjax_url_changed(self, name, old, new):
622 if new and not self.enable_mathjax:
622 if new and not self.enable_mathjax:
623 # enable_mathjax=False overrides mathjax_url
623 # enable_mathjax=False overrides mathjax_url
624 self.mathjax_url = u''
624 self.mathjax_url = u''
625 else:
625 else:
626 self.log.info("Using MathJax: %s", new)
626 self.log.info("Using MathJax: %s", new)
627
627
628 contents_manager_class = Type(
628 contents_manager_class = Type(
629 default_value=FileContentsManager,
629 default_value=FileContentsManager,
630 klass=ContentsManager,
630 klass=ContentsManager,
631 config=True,
631 config=True,
632 help='The notebook manager class to use.'
632 help='The notebook manager class to use.'
633 )
633 )
634 kernel_manager_class = Type(
634 kernel_manager_class = Type(
635 default_value=MappingKernelManager,
635 default_value=MappingKernelManager,
636 config=True,
636 config=True,
637 help='The kernel manager class to use.'
637 help='The kernel manager class to use.'
638 )
638 )
639 session_manager_class = Type(
639 session_manager_class = Type(
640 default_value=SessionManager,
640 default_value=SessionManager,
641 config=True,
641 config=True,
642 help='The session manager class to use.'
642 help='The session manager class to use.'
643 )
643 )
644 cluster_manager_class = Type(
644 cluster_manager_class = Type(
645 default_value=ClusterManager,
645 default_value=ClusterManager,
646 config=True,
646 config=True,
647 help='The cluster manager class to use.'
647 help='The cluster manager class to use.'
648 )
648 )
649
649
650 config_manager_class = Type(
650 config_manager_class = Type(
651 default_value=ConfigManager,
651 default_value=ConfigManager,
652 config = True,
652 config = True,
653 help='The config manager class to use'
653 help='The config manager class to use'
654 )
654 )
655
655
656 kernel_spec_manager = Instance(KernelSpecManager)
656 kernel_spec_manager = Instance(KernelSpecManager)
657
657
658 kernel_spec_manager_class = Type(
658 kernel_spec_manager_class = Type(
659 default_value=KernelSpecManager,
659 default_value=KernelSpecManager,
660 config=True,
660 config=True,
661 help="""
661 help="""
662 The kernel spec manager class to use. Should be a subclass
662 The kernel spec manager class to use. Should be a subclass
663 of `IPython.kernel.kernelspec.KernelSpecManager`.
663 of `IPython.kernel.kernelspec.KernelSpecManager`.
664
664
665 The Api of KernelSpecManager is provisional and might change
665 The Api of KernelSpecManager is provisional and might change
666 without warning between this version of IPython and the next stable one.
666 without warning between this version of IPython and the next stable one.
667 """
667 """
668 )
668 )
669
669
670 login_handler_class = Type(
670 login_handler_class = Type(
671 default_value=LoginHandler,
671 default_value=LoginHandler,
672 klass=web.RequestHandler,
672 klass=web.RequestHandler,
673 config=True,
673 config=True,
674 help='The login handler class to use.',
674 help='The login handler class to use.',
675 )
675 )
676
676
677 logout_handler_class = Type(
677 logout_handler_class = Type(
678 default_value=LogoutHandler,
678 default_value=LogoutHandler,
679 klass=web.RequestHandler,
679 klass=web.RequestHandler,
680 config=True,
680 config=True,
681 help='The logout handler class to use.',
681 help='The logout handler class to use.',
682 )
682 )
683
683
684 trust_xheaders = Bool(False, config=True,
684 trust_xheaders = Bool(False, config=True,
685 help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers"
685 help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers"
686 "sent by the upstream reverse proxy. Necessary if the proxy handles SSL")
686 "sent by the upstream reverse proxy. Necessary if the proxy handles SSL")
687 )
687 )
688
688
689 info_file = Unicode()
689 info_file = Unicode()
690
690
691 def _info_file_default(self):
691 def _info_file_default(self):
692 info_file = "nbserver-%s.json"%os.getpid()
692 info_file = "nbserver-%s.json"%os.getpid()
693 return os.path.join(self.profile_dir.security_dir, info_file)
693 return os.path.join(self.profile_dir.security_dir, info_file)
694
694
695 pylab = Unicode('disabled', config=True,
695 pylab = Unicode('disabled', config=True,
696 help="""
696 help="""
697 DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
697 DISABLED: use %pylab or %matplotlib in the notebook to enable matplotlib.
698 """
698 """
699 )
699 )
700 def _pylab_changed(self, name, old, new):
700 def _pylab_changed(self, name, old, new):
701 """when --pylab is specified, display a warning and exit"""
701 """when --pylab is specified, display a warning and exit"""
702 if new != 'warn':
702 if new != 'warn':
703 backend = ' %s' % new
703 backend = ' %s' % new
704 else:
704 else:
705 backend = ''
705 backend = ''
706 self.log.error("Support for specifying --pylab on the command line has been removed.")
706 self.log.error("Support for specifying --pylab on the command line has been removed.")
707 self.log.error(
707 self.log.error(
708 "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend)
708 "Please use `%pylab{0}` or `%matplotlib{0}` in the notebook itself.".format(backend)
709 )
709 )
710 self.exit(1)
710 self.exit(1)
711
711
712 notebook_dir = Unicode(config=True,
712 notebook_dir = Unicode(config=True,
713 help="The directory to use for notebooks and kernels."
713 help="The directory to use for notebooks and kernels."
714 )
714 )
715
715
716 def _notebook_dir_default(self):
716 def _notebook_dir_default(self):
717 if self.file_to_run:
717 if self.file_to_run:
718 return os.path.dirname(os.path.abspath(self.file_to_run))
718 return os.path.dirname(os.path.abspath(self.file_to_run))
719 else:
719 else:
720 return py3compat.getcwd()
720 return py3compat.getcwd()
721
721
722 def _notebook_dir_changed(self, name, old, new):
722 def _notebook_dir_changed(self, name, old, new):
723 """Do a bit of validation of the notebook dir."""
723 """Do a bit of validation of the notebook dir."""
724 if not os.path.isabs(new):
724 if not os.path.isabs(new):
725 # If we receive a non-absolute path, make it absolute.
725 # If we receive a non-absolute path, make it absolute.
726 self.notebook_dir = os.path.abspath(new)
726 self.notebook_dir = os.path.abspath(new)
727 return
727 return
728 if not os.path.isdir(new):
728 if not os.path.isdir(new):
729 raise TraitError("No such notebook dir: %r" % new)
729 raise TraitError("No such notebook dir: %r" % new)
730
730
731 # setting App.notebook_dir implies setting notebook and kernel dirs as well
731 # setting App.notebook_dir implies setting notebook and kernel dirs as well
732 self.config.FileContentsManager.root_dir = new
732 self.config.FileContentsManager.root_dir = new
733 self.config.MappingKernelManager.root_dir = new
733 self.config.MappingKernelManager.root_dir = new
734
734
735 server_extensions = List(Unicode(), config=True,
735 server_extensions = List(Unicode(), config=True,
736 help=("Python modules to load as notebook server extensions. "
736 help=("Python modules to load as notebook server extensions. "
737 "This is an experimental API, and may change in future releases.")
737 "This is an experimental API, and may change in future releases.")
738 )
738 )
739
739
740 def parse_command_line(self, argv=None):
740 def parse_command_line(self, argv=None):
741 super(NotebookApp, self).parse_command_line(argv)
741 super(NotebookApp, self).parse_command_line(argv)
742
742
743 if self.extra_args:
743 if self.extra_args:
744 arg0 = self.extra_args[0]
744 arg0 = self.extra_args[0]
745 f = os.path.abspath(arg0)
745 f = os.path.abspath(arg0)
746 self.argv.remove(arg0)
746 self.argv.remove(arg0)
747 if not os.path.exists(f):
747 if not os.path.exists(f):
748 self.log.critical("No such file or directory: %s", f)
748 self.log.critical("No such file or directory: %s", f)
749 self.exit(1)
749 self.exit(1)
750
750
751 # Use config here, to ensure that it takes higher priority than
751 # Use config here, to ensure that it takes higher priority than
752 # anything that comes from the profile.
752 # anything that comes from the profile.
753 c = Config()
753 c = Config()
754 if os.path.isdir(f):
754 if os.path.isdir(f):
755 c.NotebookApp.notebook_dir = f
755 c.NotebookApp.notebook_dir = f
756 elif os.path.isfile(f):
756 elif os.path.isfile(f):
757 c.NotebookApp.file_to_run = f
757 c.NotebookApp.file_to_run = f
758 self.update_config(c)
758 self.update_config(c)
759
759
760 def init_kernel_argv(self):
760 def init_kernel_argv(self):
761 """add the profile-dir to arguments to be passed to IPython kernels"""
761 """add the profile-dir to arguments to be passed to IPython kernels"""
762 # FIXME: remove special treatment of IPython kernels
762 # FIXME: remove special treatment of IPython kernels
763 # Kernel should get *absolute* path to profile directory
763 # Kernel should get *absolute* path to profile directory
764 self.ipython_kernel_argv = ["--profile-dir", self.profile_dir.location]
764 self.ipython_kernel_argv = ["--profile-dir", self.profile_dir.location]
765
765
766 def init_configurables(self):
766 def init_configurables(self):
767 # force Session default to be secure
768 default_secure(self.config)
769
770 self.kernel_spec_manager = self.kernel_spec_manager_class(
767 self.kernel_spec_manager = self.kernel_spec_manager_class(
771 parent=self,
768 parent=self,
772 ipython_dir=self.ipython_dir,
769 ipython_dir=self.ipython_dir,
773 )
770 )
774 self.kernel_manager = self.kernel_manager_class(
771 self.kernel_manager = self.kernel_manager_class(
775 parent=self,
772 parent=self,
776 log=self.log,
773 log=self.log,
777 ipython_kernel_argv=self.ipython_kernel_argv,
774 ipython_kernel_argv=self.ipython_kernel_argv,
778 connection_dir=self.profile_dir.security_dir,
775 connection_dir=self.profile_dir.security_dir,
779 )
776 )
780 self.contents_manager = self.contents_manager_class(
777 self.contents_manager = self.contents_manager_class(
781 parent=self,
778 parent=self,
782 log=self.log,
779 log=self.log,
783 )
780 )
784 self.session_manager = self.session_manager_class(
781 self.session_manager = self.session_manager_class(
785 parent=self,
782 parent=self,
786 log=self.log,
783 log=self.log,
787 kernel_manager=self.kernel_manager,
784 kernel_manager=self.kernel_manager,
788 contents_manager=self.contents_manager,
785 contents_manager=self.contents_manager,
789 )
786 )
790 self.cluster_manager = self.cluster_manager_class(
787 self.cluster_manager = self.cluster_manager_class(
791 parent=self,
788 parent=self,
792 log=self.log,
789 log=self.log,
793 )
790 )
794
791
795 self.config_manager = self.config_manager_class(
792 self.config_manager = self.config_manager_class(
796 parent=self,
793 parent=self,
797 log=self.log,
794 log=self.log,
798 profile_dir=self.profile_dir.location,
795 profile_dir=self.profile_dir.location,
799 )
796 )
800
797
801 def init_logging(self):
798 def init_logging(self):
802 # This prevents double log messages because tornado use a root logger that
799 # This prevents double log messages because tornado use a root logger that
803 # self.log is a child of. The logging module dipatches log messages to a log
800 # self.log is a child of. The logging module dipatches log messages to a log
804 # and all of its ancenstors until propagate is set to False.
801 # and all of its ancenstors until propagate is set to False.
805 self.log.propagate = False
802 self.log.propagate = False
806
803
807 for log in app_log, access_log, gen_log:
804 for log in app_log, access_log, gen_log:
808 # consistent log output name (NotebookApp instead of tornado.access, etc.)
805 # consistent log output name (NotebookApp instead of tornado.access, etc.)
809 log.name = self.log.name
806 log.name = self.log.name
810 # hook up tornado 3's loggers to our app handlers
807 # hook up tornado 3's loggers to our app handlers
811 logger = logging.getLogger('tornado')
808 logger = logging.getLogger('tornado')
812 logger.propagate = True
809 logger.propagate = True
813 logger.parent = self.log
810 logger.parent = self.log
814 logger.setLevel(self.log.level)
811 logger.setLevel(self.log.level)
815
812
816 def init_webapp(self):
813 def init_webapp(self):
817 """initialize tornado webapp and httpserver"""
814 """initialize tornado webapp and httpserver"""
818 self.tornado_settings['allow_origin'] = self.allow_origin
815 self.tornado_settings['allow_origin'] = self.allow_origin
819 if self.allow_origin_pat:
816 if self.allow_origin_pat:
820 self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat)
817 self.tornado_settings['allow_origin_pat'] = re.compile(self.allow_origin_pat)
821 self.tornado_settings['allow_credentials'] = self.allow_credentials
818 self.tornado_settings['allow_credentials'] = self.allow_credentials
822 # ensure default_url starts with base_url
819 # ensure default_url starts with base_url
823 if not self.default_url.startswith(self.base_url):
820 if not self.default_url.startswith(self.base_url):
824 self.default_url = url_path_join(self.base_url, self.default_url)
821 self.default_url = url_path_join(self.base_url, self.default_url)
825
822
826 self.web_app = NotebookWebApplication(
823 self.web_app = NotebookWebApplication(
827 self, self.kernel_manager, self.contents_manager,
824 self, self.kernel_manager, self.contents_manager,
828 self.cluster_manager, self.session_manager, self.kernel_spec_manager,
825 self.cluster_manager, self.session_manager, self.kernel_spec_manager,
829 self.config_manager,
826 self.config_manager,
830 self.log, self.base_url, self.default_url, self.tornado_settings,
827 self.log, self.base_url, self.default_url, self.tornado_settings,
831 self.jinja_environment_options
828 self.jinja_environment_options
832 )
829 )
833 ssl_options = self.ssl_options
830 ssl_options = self.ssl_options
834 if self.certfile:
831 if self.certfile:
835 ssl_options['certfile'] = self.certfile
832 ssl_options['certfile'] = self.certfile
836 if self.keyfile:
833 if self.keyfile:
837 ssl_options['keyfile'] = self.keyfile
834 ssl_options['keyfile'] = self.keyfile
838 if not ssl_options:
835 if not ssl_options:
839 # None indicates no SSL config
836 # None indicates no SSL config
840 ssl_options = None
837 ssl_options = None
841 self.login_handler_class.validate_security(self, ssl_options=ssl_options)
838 self.login_handler_class.validate_security(self, ssl_options=ssl_options)
842 self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options,
839 self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options,
843 xheaders=self.trust_xheaders)
840 xheaders=self.trust_xheaders)
844
841
845 success = None
842 success = None
846 for port in random_ports(self.port, self.port_retries+1):
843 for port in random_ports(self.port, self.port_retries+1):
847 try:
844 try:
848 self.http_server.listen(port, self.ip)
845 self.http_server.listen(port, self.ip)
849 except socket.error as e:
846 except socket.error as e:
850 if e.errno == errno.EADDRINUSE:
847 if e.errno == errno.EADDRINUSE:
851 self.log.info('The port %i is already in use, trying another random port.' % port)
848 self.log.info('The port %i is already in use, trying another random port.' % port)
852 continue
849 continue
853 elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)):
850 elif e.errno in (errno.EACCES, getattr(errno, 'WSAEACCES', errno.EACCES)):
854 self.log.warn("Permission to listen on port %i denied" % port)
851 self.log.warn("Permission to listen on port %i denied" % port)
855 continue
852 continue
856 else:
853 else:
857 raise
854 raise
858 else:
855 else:
859 self.port = port
856 self.port = port
860 success = True
857 success = True
861 break
858 break
862 if not success:
859 if not success:
863 self.log.critical('ERROR: the notebook server could not be started because '
860 self.log.critical('ERROR: the notebook server could not be started because '
864 'no available port could be found.')
861 'no available port could be found.')
865 self.exit(1)
862 self.exit(1)
866
863
867 @property
864 @property
868 def display_url(self):
865 def display_url(self):
869 ip = self.ip if self.ip else '[all ip addresses on your system]'
866 ip = self.ip if self.ip else '[all ip addresses on your system]'
870 return self._url(ip)
867 return self._url(ip)
871
868
872 @property
869 @property
873 def connection_url(self):
870 def connection_url(self):
874 ip = self.ip if self.ip else 'localhost'
871 ip = self.ip if self.ip else 'localhost'
875 return self._url(ip)
872 return self._url(ip)
876
873
877 def _url(self, ip):
874 def _url(self, ip):
878 proto = 'https' if self.certfile else 'http'
875 proto = 'https' if self.certfile else 'http'
879 return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url)
876 return "%s://%s:%i%s" % (proto, ip, self.port, self.base_url)
880
877
881 def init_terminals(self):
878 def init_terminals(self):
882 try:
879 try:
883 from .terminal import initialize
880 from .terminal import initialize
884 initialize(self.web_app)
881 initialize(self.web_app)
885 self.web_app.settings['terminals_available'] = True
882 self.web_app.settings['terminals_available'] = True
886 except ImportError as e:
883 except ImportError as e:
887 log = self.log.debug if sys.platform == 'win32' else self.log.warn
884 log = self.log.debug if sys.platform == 'win32' else self.log.warn
888 log("Terminals not available (error was %s)", e)
885 log("Terminals not available (error was %s)", e)
889
886
890 def init_signal(self):
887 def init_signal(self):
891 if not sys.platform.startswith('win'):
888 if not sys.platform.startswith('win'):
892 signal.signal(signal.SIGINT, self._handle_sigint)
889 signal.signal(signal.SIGINT, self._handle_sigint)
893 signal.signal(signal.SIGTERM, self._signal_stop)
890 signal.signal(signal.SIGTERM, self._signal_stop)
894 if hasattr(signal, 'SIGUSR1'):
891 if hasattr(signal, 'SIGUSR1'):
895 # Windows doesn't support SIGUSR1
892 # Windows doesn't support SIGUSR1
896 signal.signal(signal.SIGUSR1, self._signal_info)
893 signal.signal(signal.SIGUSR1, self._signal_info)
897 if hasattr(signal, 'SIGINFO'):
894 if hasattr(signal, 'SIGINFO'):
898 # only on BSD-based systems
895 # only on BSD-based systems
899 signal.signal(signal.SIGINFO, self._signal_info)
896 signal.signal(signal.SIGINFO, self._signal_info)
900
897
901 def _handle_sigint(self, sig, frame):
898 def _handle_sigint(self, sig, frame):
902 """SIGINT handler spawns confirmation dialog"""
899 """SIGINT handler spawns confirmation dialog"""
903 # register more forceful signal handler for ^C^C case
900 # register more forceful signal handler for ^C^C case
904 signal.signal(signal.SIGINT, self._signal_stop)
901 signal.signal(signal.SIGINT, self._signal_stop)
905 # request confirmation dialog in bg thread, to avoid
902 # request confirmation dialog in bg thread, to avoid
906 # blocking the App
903 # blocking the App
907 thread = threading.Thread(target=self._confirm_exit)
904 thread = threading.Thread(target=self._confirm_exit)
908 thread.daemon = True
905 thread.daemon = True
909 thread.start()
906 thread.start()
910
907
911 def _restore_sigint_handler(self):
908 def _restore_sigint_handler(self):
912 """callback for restoring original SIGINT handler"""
909 """callback for restoring original SIGINT handler"""
913 signal.signal(signal.SIGINT, self._handle_sigint)
910 signal.signal(signal.SIGINT, self._handle_sigint)
914
911
915 def _confirm_exit(self):
912 def _confirm_exit(self):
916 """confirm shutdown on ^C
913 """confirm shutdown on ^C
917
914
918 A second ^C, or answering 'y' within 5s will cause shutdown,
915 A second ^C, or answering 'y' within 5s will cause shutdown,
919 otherwise original SIGINT handler will be restored.
916 otherwise original SIGINT handler will be restored.
920
917
921 This doesn't work on Windows.
918 This doesn't work on Windows.
922 """
919 """
923 info = self.log.info
920 info = self.log.info
924 info('interrupted')
921 info('interrupted')
925 print(self.notebook_info())
922 print(self.notebook_info())
926 sys.stdout.write("Shutdown this notebook server (y/[n])? ")
923 sys.stdout.write("Shutdown this notebook server (y/[n])? ")
927 sys.stdout.flush()
924 sys.stdout.flush()
928 r,w,x = select.select([sys.stdin], [], [], 5)
925 r,w,x = select.select([sys.stdin], [], [], 5)
929 if r:
926 if r:
930 line = sys.stdin.readline()
927 line = sys.stdin.readline()
931 if line.lower().startswith('y') and 'n' not in line.lower():
928 if line.lower().startswith('y') and 'n' not in line.lower():
932 self.log.critical("Shutdown confirmed")
929 self.log.critical("Shutdown confirmed")
933 ioloop.IOLoop.current().stop()
930 ioloop.IOLoop.current().stop()
934 return
931 return
935 else:
932 else:
936 print("No answer for 5s:", end=' ')
933 print("No answer for 5s:", end=' ')
937 print("resuming operation...")
934 print("resuming operation...")
938 # no answer, or answer is no:
935 # no answer, or answer is no:
939 # set it back to original SIGINT handler
936 # set it back to original SIGINT handler
940 # use IOLoop.add_callback because signal.signal must be called
937 # use IOLoop.add_callback because signal.signal must be called
941 # from main thread
938 # from main thread
942 ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)
939 ioloop.IOLoop.current().add_callback(self._restore_sigint_handler)
943
940
944 def _signal_stop(self, sig, frame):
941 def _signal_stop(self, sig, frame):
945 self.log.critical("received signal %s, stopping", sig)
942 self.log.critical("received signal %s, stopping", sig)
946 ioloop.IOLoop.current().stop()
943 ioloop.IOLoop.current().stop()
947
944
948 def _signal_info(self, sig, frame):
945 def _signal_info(self, sig, frame):
949 print(self.notebook_info())
946 print(self.notebook_info())
950
947
951 def init_components(self):
948 def init_components(self):
952 """Check the components submodule, and warn if it's unclean"""
949 """Check the components submodule, and warn if it's unclean"""
953 status = submodule.check_submodule_status()
950 status = submodule.check_submodule_status()
954 if status == 'missing':
951 if status == 'missing':
955 self.log.warn("components submodule missing, running `git submodule update`")
952 self.log.warn("components submodule missing, running `git submodule update`")
956 submodule.update_submodules(submodule.ipython_parent())
953 submodule.update_submodules(submodule.ipython_parent())
957 elif status == 'unclean':
954 elif status == 'unclean':
958 self.log.warn("components submodule unclean, you may see 404s on static/components")
955 self.log.warn("components submodule unclean, you may see 404s on static/components")
959 self.log.warn("run `setup.py submodule` or `git submodule update` to update")
956 self.log.warn("run `setup.py submodule` or `git submodule update` to update")
960
957
961 def init_server_extensions(self):
958 def init_server_extensions(self):
962 """Load any extensions specified by config.
959 """Load any extensions specified by config.
963
960
964 Import the module, then call the load_jupyter_server_extension function,
961 Import the module, then call the load_jupyter_server_extension function,
965 if one exists.
962 if one exists.
966
963
967 The extension API is experimental, and may change in future releases.
964 The extension API is experimental, and may change in future releases.
968 """
965 """
969 for modulename in self.server_extensions:
966 for modulename in self.server_extensions:
970 try:
967 try:
971 mod = importlib.import_module(modulename)
968 mod = importlib.import_module(modulename)
972 func = getattr(mod, 'load_jupyter_server_extension', None)
969 func = getattr(mod, 'load_jupyter_server_extension', None)
973 if func is not None:
970 if func is not None:
974 func(self)
971 func(self)
975 except Exception:
972 except Exception:
976 self.log.warn("Error loading server extension %s", modulename,
973 self.log.warn("Error loading server extension %s", modulename,
977 exc_info=True)
974 exc_info=True)
978
975
979 @catch_config_error
976 @catch_config_error
980 def initialize(self, argv=None):
977 def initialize(self, argv=None):
981 super(NotebookApp, self).initialize(argv)
978 super(NotebookApp, self).initialize(argv)
982 self.init_logging()
979 self.init_logging()
983 self.init_kernel_argv()
980 self.init_kernel_argv()
984 self.init_configurables()
981 self.init_configurables()
985 self.init_components()
982 self.init_components()
986 self.init_webapp()
983 self.init_webapp()
987 self.init_terminals()
984 self.init_terminals()
988 self.init_signal()
985 self.init_signal()
989 self.init_server_extensions()
986 self.init_server_extensions()
990
987
991 def cleanup_kernels(self):
988 def cleanup_kernels(self):
992 """Shutdown all kernels.
989 """Shutdown all kernels.
993
990
994 The kernels will shutdown themselves when this process no longer exists,
991 The kernels will shutdown themselves when this process no longer exists,
995 but explicit shutdown allows the KernelManagers to cleanup the connection files.
992 but explicit shutdown allows the KernelManagers to cleanup the connection files.
996 """
993 """
997 self.log.info('Shutting down kernels')
994 self.log.info('Shutting down kernels')
998 self.kernel_manager.shutdown_all()
995 self.kernel_manager.shutdown_all()
999
996
1000 def notebook_info(self):
997 def notebook_info(self):
1001 "Return the current working directory and the server url information"
998 "Return the current working directory and the server url information"
1002 info = self.contents_manager.info_string() + "\n"
999 info = self.contents_manager.info_string() + "\n"
1003 info += "%d active kernels \n" % len(self.kernel_manager._kernels)
1000 info += "%d active kernels \n" % len(self.kernel_manager._kernels)
1004 return info + "The IPython Notebook is running at: %s" % self.display_url
1001 return info + "The IPython Notebook is running at: %s" % self.display_url
1005
1002
1006 def server_info(self):
1003 def server_info(self):
1007 """Return a JSONable dict of information about this server."""
1004 """Return a JSONable dict of information about this server."""
1008 return {'url': self.connection_url,
1005 return {'url': self.connection_url,
1009 'hostname': self.ip if self.ip else 'localhost',
1006 'hostname': self.ip if self.ip else 'localhost',
1010 'port': self.port,
1007 'port': self.port,
1011 'secure': bool(self.certfile),
1008 'secure': bool(self.certfile),
1012 'base_url': self.base_url,
1009 'base_url': self.base_url,
1013 'notebook_dir': os.path.abspath(self.notebook_dir),
1010 'notebook_dir': os.path.abspath(self.notebook_dir),
1014 'pid': os.getpid()
1011 'pid': os.getpid()
1015 }
1012 }
1016
1013
1017 def write_server_info_file(self):
1014 def write_server_info_file(self):
1018 """Write the result of server_info() to the JSON file info_file."""
1015 """Write the result of server_info() to the JSON file info_file."""
1019 with open(self.info_file, 'w') as f:
1016 with open(self.info_file, 'w') as f:
1020 json.dump(self.server_info(), f, indent=2)
1017 json.dump(self.server_info(), f, indent=2)
1021
1018
1022 def remove_server_info_file(self):
1019 def remove_server_info_file(self):
1023 """Remove the nbserver-<pid>.json file created for this server.
1020 """Remove the nbserver-<pid>.json file created for this server.
1024
1021
1025 Ignores the error raised when the file has already been removed.
1022 Ignores the error raised when the file has already been removed.
1026 """
1023 """
1027 try:
1024 try:
1028 os.unlink(self.info_file)
1025 os.unlink(self.info_file)
1029 except OSError as e:
1026 except OSError as e:
1030 if e.errno != errno.ENOENT:
1027 if e.errno != errno.ENOENT:
1031 raise
1028 raise
1032
1029
1033 def start(self):
1030 def start(self):
1034 """ Start the IPython Notebook server app, after initialization
1031 """ Start the IPython Notebook server app, after initialization
1035
1032
1036 This method takes no arguments so all configuration and initialization
1033 This method takes no arguments so all configuration and initialization
1037 must be done prior to calling this method."""
1034 must be done prior to calling this method."""
1038 if self.subapp is not None:
1035 if self.subapp is not None:
1039 return self.subapp.start()
1036 return self.subapp.start()
1040
1037
1041 info = self.log.info
1038 info = self.log.info
1042 for line in self.notebook_info().split("\n"):
1039 for line in self.notebook_info().split("\n"):
1043 info(line)
1040 info(line)
1044 info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).")
1041 info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).")
1045
1042
1046 self.write_server_info_file()
1043 self.write_server_info_file()
1047
1044
1048 if self.open_browser or self.file_to_run:
1045 if self.open_browser or self.file_to_run:
1049 try:
1046 try:
1050 browser = webbrowser.get(self.browser or None)
1047 browser = webbrowser.get(self.browser or None)
1051 except webbrowser.Error as e:
1048 except webbrowser.Error as e:
1052 self.log.warn('No web browser found: %s.' % e)
1049 self.log.warn('No web browser found: %s.' % e)
1053 browser = None
1050 browser = None
1054
1051
1055 if self.file_to_run:
1052 if self.file_to_run:
1056 if not os.path.exists(self.file_to_run):
1053 if not os.path.exists(self.file_to_run):
1057 self.log.critical("%s does not exist" % self.file_to_run)
1054 self.log.critical("%s does not exist" % self.file_to_run)
1058 self.exit(1)
1055 self.exit(1)
1059
1056
1060 relpath = os.path.relpath(self.file_to_run, self.notebook_dir)
1057 relpath = os.path.relpath(self.file_to_run, self.notebook_dir)
1061 uri = url_path_join('notebooks', *relpath.split(os.sep))
1058 uri = url_path_join('notebooks', *relpath.split(os.sep))
1062 else:
1059 else:
1063 uri = 'tree'
1060 uri = 'tree'
1064 if browser:
1061 if browser:
1065 b = lambda : browser.open(url_path_join(self.connection_url, uri),
1062 b = lambda : browser.open(url_path_join(self.connection_url, uri),
1066 new=2)
1063 new=2)
1067 threading.Thread(target=b).start()
1064 threading.Thread(target=b).start()
1068
1065
1069 self.io_loop = ioloop.IOLoop.current()
1066 self.io_loop = ioloop.IOLoop.current()
1070 if sys.platform.startswith('win'):
1067 if sys.platform.startswith('win'):
1071 # add no-op to wake every 5s
1068 # add no-op to wake every 5s
1072 # to handle signals that may be ignored by the inner loop
1069 # to handle signals that may be ignored by the inner loop
1073 pc = ioloop.PeriodicCallback(lambda : None, 5000)
1070 pc = ioloop.PeriodicCallback(lambda : None, 5000)
1074 pc.start()
1071 pc.start()
1075 try:
1072 try:
1076 self.io_loop.start()
1073 self.io_loop.start()
1077 except KeyboardInterrupt:
1074 except KeyboardInterrupt:
1078 info("Interrupted...")
1075 info("Interrupted...")
1079 finally:
1076 finally:
1080 self.cleanup_kernels()
1077 self.cleanup_kernels()
1081 self.remove_server_info_file()
1078 self.remove_server_info_file()
1082
1079
1083 def stop(self):
1080 def stop(self):
1084 def _stop():
1081 def _stop():
1085 self.http_server.stop()
1082 self.http_server.stop()
1086 self.io_loop.stop()
1083 self.io_loop.stop()
1087 self.io_loop.add_callback(_stop)
1084 self.io_loop.add_callback(_stop)
1088
1085
1089
1086
1090 def list_running_servers(profile='default'):
1087 def list_running_servers(profile='default'):
1091 """Iterate over the server info files of running notebook servers.
1088 """Iterate over the server info files of running notebook servers.
1092
1089
1093 Given a profile name, find nbserver-* files in the security directory of
1090 Given a profile name, find nbserver-* files in the security directory of
1094 that profile, and yield dicts of their information, each one pertaining to
1091 that profile, and yield dicts of their information, each one pertaining to
1095 a currently running notebook server instance.
1092 a currently running notebook server instance.
1096 """
1093 """
1097 pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), name=profile)
1094 pd = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), name=profile)
1098 for file in os.listdir(pd.security_dir):
1095 for file in os.listdir(pd.security_dir):
1099 if file.startswith('nbserver-'):
1096 if file.startswith('nbserver-'):
1100 with io.open(os.path.join(pd.security_dir, file), encoding='utf-8') as f:
1097 with io.open(os.path.join(pd.security_dir, file), encoding='utf-8') as f:
1101 info = json.load(f)
1098 info = json.load(f)
1102
1099
1103 # Simple check whether that process is really still running
1100 # Simple check whether that process is really still running
1104 # Also remove leftover files from IPython 2.x without a pid field
1101 # Also remove leftover files from IPython 2.x without a pid field
1105 if ('pid' in info) and check_pid(info['pid']):
1102 if ('pid' in info) and check_pid(info['pid']):
1106 yield info
1103 yield info
1107 else:
1104 else:
1108 # If the process has died, try to delete its info file
1105 # If the process has died, try to delete its info file
1109 try:
1106 try:
1110 os.unlink(file)
1107 os.unlink(file)
1111 except OSError:
1108 except OSError:
1112 pass # TODO: This should warn or log or something
1109 pass # TODO: This should warn or log or something
1113 #-----------------------------------------------------------------------------
1110 #-----------------------------------------------------------------------------
1114 # Main entry point
1111 # Main entry point
1115 #-----------------------------------------------------------------------------
1112 #-----------------------------------------------------------------------------
1116
1113
1117 launch_new_instance = NotebookApp.launch_instance
1114 launch_new_instance = NotebookApp.launch_instance
1118
1115
@@ -1,286 +1,288 b''
1 """Tornado handlers for kernels."""
1 """Tornado handlers for kernels."""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 import json
6 import json
7 import logging
7 import logging
8 from tornado import gen, web
8 from tornado import gen, web
9 from tornado.concurrent import Future
9 from tornado.concurrent import Future
10 from tornado.ioloop import IOLoop
10 from tornado.ioloop import IOLoop
11
11
12 from IPython.utils.jsonutil import date_default
12 from IPython.utils.jsonutil import date_default
13 from IPython.utils.py3compat import cast_unicode
13 from IPython.utils.py3compat import cast_unicode
14 from IPython.html.utils import url_path_join, url_escape
14 from IPython.html.utils import url_path_join, url_escape
15
15
16 from ...base.handlers import IPythonHandler, json_errors
16 from ...base.handlers import IPythonHandler, json_errors
17 from ...base.zmqhandlers import AuthenticatedZMQStreamHandler, deserialize_binary_message
17 from ...base.zmqhandlers import AuthenticatedZMQStreamHandler, deserialize_binary_message
18
18
19 from IPython.core.release import kernel_protocol_version
19 from IPython.core.release import kernel_protocol_version
20
20
21 class MainKernelHandler(IPythonHandler):
21 class MainKernelHandler(IPythonHandler):
22
22
23 @web.authenticated
23 @web.authenticated
24 @json_errors
24 @json_errors
25 def get(self):
25 def get(self):
26 km = self.kernel_manager
26 km = self.kernel_manager
27 self.finish(json.dumps(km.list_kernels()))
27 self.finish(json.dumps(km.list_kernels()))
28
28
29 @web.authenticated
29 @web.authenticated
30 @json_errors
30 @json_errors
31 def post(self):
31 def post(self):
32 km = self.kernel_manager
32 km = self.kernel_manager
33 model = self.get_json_body()
33 model = self.get_json_body()
34 if model is None:
34 if model is None:
35 model = {
35 model = {
36 'name': km.default_kernel_name
36 'name': km.default_kernel_name
37 }
37 }
38 else:
38 else:
39 model.setdefault('name', km.default_kernel_name)
39 model.setdefault('name', km.default_kernel_name)
40
40
41 kernel_id = km.start_kernel(kernel_name=model['name'])
41 kernel_id = km.start_kernel(kernel_name=model['name'])
42 model = km.kernel_model(kernel_id)
42 model = km.kernel_model(kernel_id)
43 location = url_path_join(self.base_url, 'api', 'kernels', kernel_id)
43 location = url_path_join(self.base_url, 'api', 'kernels', kernel_id)
44 self.set_header('Location', url_escape(location))
44 self.set_header('Location', url_escape(location))
45 self.set_status(201)
45 self.set_status(201)
46 self.finish(json.dumps(model))
46 self.finish(json.dumps(model))
47
47
48
48
49 class KernelHandler(IPythonHandler):
49 class KernelHandler(IPythonHandler):
50
50
51 SUPPORTED_METHODS = ('DELETE', 'GET')
51 SUPPORTED_METHODS = ('DELETE', 'GET')
52
52
53 @web.authenticated
53 @web.authenticated
54 @json_errors
54 @json_errors
55 def get(self, kernel_id):
55 def get(self, kernel_id):
56 km = self.kernel_manager
56 km = self.kernel_manager
57 km._check_kernel_id(kernel_id)
57 km._check_kernel_id(kernel_id)
58 model = km.kernel_model(kernel_id)
58 model = km.kernel_model(kernel_id)
59 self.finish(json.dumps(model))
59 self.finish(json.dumps(model))
60
60
61 @web.authenticated
61 @web.authenticated
62 @json_errors
62 @json_errors
63 def delete(self, kernel_id):
63 def delete(self, kernel_id):
64 km = self.kernel_manager
64 km = self.kernel_manager
65 km.shutdown_kernel(kernel_id)
65 km.shutdown_kernel(kernel_id)
66 self.set_status(204)
66 self.set_status(204)
67 self.finish()
67 self.finish()
68
68
69
69
70 class KernelActionHandler(IPythonHandler):
70 class KernelActionHandler(IPythonHandler):
71
71
72 @web.authenticated
72 @web.authenticated
73 @json_errors
73 @json_errors
74 def post(self, kernel_id, action):
74 def post(self, kernel_id, action):
75 km = self.kernel_manager
75 km = self.kernel_manager
76 if action == 'interrupt':
76 if action == 'interrupt':
77 km.interrupt_kernel(kernel_id)
77 km.interrupt_kernel(kernel_id)
78 self.set_status(204)
78 self.set_status(204)
79 if action == 'restart':
79 if action == 'restart':
80 km.restart_kernel(kernel_id)
80 km.restart_kernel(kernel_id)
81 model = km.kernel_model(kernel_id)
81 model = km.kernel_model(kernel_id)
82 self.set_header('Location', '{0}api/kernels/{1}'.format(self.base_url, kernel_id))
82 self.set_header('Location', '{0}api/kernels/{1}'.format(self.base_url, kernel_id))
83 self.write(json.dumps(model))
83 self.write(json.dumps(model))
84 self.finish()
84 self.finish()
85
85
86
86
87 class ZMQChannelsHandler(AuthenticatedZMQStreamHandler):
87 class ZMQChannelsHandler(AuthenticatedZMQStreamHandler):
88
88
89 @property
89 @property
90 def kernel_info_timeout(self):
90 def kernel_info_timeout(self):
91 return self.settings.get('kernel_info_timeout', 10)
91 return self.settings.get('kernel_info_timeout', 10)
92
92
93 def __repr__(self):
93 def __repr__(self):
94 return "%s(%s)" % (self.__class__.__name__, getattr(self, 'kernel_id', 'uninitialized'))
94 return "%s(%s)" % (self.__class__.__name__, getattr(self, 'kernel_id', 'uninitialized'))
95
95
96 def create_stream(self):
96 def create_stream(self):
97 km = self.kernel_manager
97 km = self.kernel_manager
98 identity = self.session.bsession
98 identity = self.session.bsession
99 for channel in ('shell', 'iopub', 'stdin'):
99 for channel in ('shell', 'iopub', 'stdin'):
100 meth = getattr(km, 'connect_' + channel)
100 meth = getattr(km, 'connect_' + channel)
101 self.channels[channel] = stream = meth(self.kernel_id, identity=identity)
101 self.channels[channel] = stream = meth(self.kernel_id, identity=identity)
102 stream.channel = channel
102 stream.channel = channel
103 km.add_restart_callback(self.kernel_id, self.on_kernel_restarted)
103 km.add_restart_callback(self.kernel_id, self.on_kernel_restarted)
104 km.add_restart_callback(self.kernel_id, self.on_restart_failed, 'dead')
104 km.add_restart_callback(self.kernel_id, self.on_restart_failed, 'dead')
105
105
106 def request_kernel_info(self):
106 def request_kernel_info(self):
107 """send a request for kernel_info"""
107 """send a request for kernel_info"""
108 km = self.kernel_manager
108 km = self.kernel_manager
109 kernel = km.get_kernel(self.kernel_id)
109 kernel = km.get_kernel(self.kernel_id)
110 try:
110 try:
111 # check for previous request
111 # check for previous request
112 future = kernel._kernel_info_future
112 future = kernel._kernel_info_future
113 except AttributeError:
113 except AttributeError:
114 self.log.debug("Requesting kernel info from %s", self.kernel_id)
114 self.log.debug("Requesting kernel info from %s", self.kernel_id)
115 # Create a kernel_info channel to query the kernel protocol version.
115 # Create a kernel_info channel to query the kernel protocol version.
116 # This channel will be closed after the kernel_info reply is received.
116 # This channel will be closed after the kernel_info reply is received.
117 if self.kernel_info_channel is None:
117 if self.kernel_info_channel is None:
118 self.kernel_info_channel = km.connect_shell(self.kernel_id)
118 self.kernel_info_channel = km.connect_shell(self.kernel_id)
119 self.kernel_info_channel.on_recv(self._handle_kernel_info_reply)
119 self.kernel_info_channel.on_recv(self._handle_kernel_info_reply)
120 self.session.send(self.kernel_info_channel, "kernel_info_request")
120 self.session.send(self.kernel_info_channel, "kernel_info_request")
121 # store the future on the kernel, so only one request is sent
121 # store the future on the kernel, so only one request is sent
122 kernel._kernel_info_future = self._kernel_info_future
122 kernel._kernel_info_future = self._kernel_info_future
123 else:
123 else:
124 if not future.done():
124 if not future.done():
125 self.log.debug("Waiting for pending kernel_info request")
125 self.log.debug("Waiting for pending kernel_info request")
126 future.add_done_callback(lambda f: self._finish_kernel_info(f.result()))
126 future.add_done_callback(lambda f: self._finish_kernel_info(f.result()))
127 return self._kernel_info_future
127 return self._kernel_info_future
128
128
129 def _handle_kernel_info_reply(self, msg):
129 def _handle_kernel_info_reply(self, msg):
130 """process the kernel_info_reply
130 """process the kernel_info_reply
131
131
132 enabling msg spec adaptation, if necessary
132 enabling msg spec adaptation, if necessary
133 """
133 """
134 idents,msg = self.session.feed_identities(msg)
134 idents,msg = self.session.feed_identities(msg)
135 try:
135 try:
136 msg = self.session.deserialize(msg)
136 msg = self.session.deserialize(msg)
137 except:
137 except:
138 self.log.error("Bad kernel_info reply", exc_info=True)
138 self.log.error("Bad kernel_info reply", exc_info=True)
139 self._kernel_info_future.set_result({})
139 self._kernel_info_future.set_result({})
140 return
140 return
141 else:
141 else:
142 info = msg['content']
142 info = msg['content']
143 self.log.debug("Received kernel info: %s", info)
143 self.log.debug("Received kernel info: %s", info)
144 if msg['msg_type'] != 'kernel_info_reply' or 'protocol_version' not in info:
144 if msg['msg_type'] != 'kernel_info_reply' or 'protocol_version' not in info:
145 self.log.error("Kernel info request failed, assuming current %s", info)
145 self.log.error("Kernel info request failed, assuming current %s", info)
146 info = {}
146 info = {}
147 self._finish_kernel_info(info)
147 self._finish_kernel_info(info)
148
148
149 # close the kernel_info channel, we don't need it anymore
149 # close the kernel_info channel, we don't need it anymore
150 if self.kernel_info_channel:
150 if self.kernel_info_channel:
151 self.kernel_info_channel.close()
151 self.kernel_info_channel.close()
152 self.kernel_info_channel = None
152 self.kernel_info_channel = None
153
153
154 def _finish_kernel_info(self, info):
154 def _finish_kernel_info(self, info):
155 """Finish handling kernel_info reply
155 """Finish handling kernel_info reply
156
156
157 Set up protocol adaptation, if needed,
157 Set up protocol adaptation, if needed,
158 and signal that connection can continue.
158 and signal that connection can continue.
159 """
159 """
160 protocol_version = info.get('protocol_version', kernel_protocol_version)
160 protocol_version = info.get('protocol_version', kernel_protocol_version)
161 if protocol_version != kernel_protocol_version:
161 if protocol_version != kernel_protocol_version:
162 self.session.adapt_version = int(protocol_version.split('.')[0])
162 self.session.adapt_version = int(protocol_version.split('.')[0])
163 self.log.info("Adapting to protocol v%s for kernel %s", protocol_version, self.kernel_id)
163 self.log.info("Adapting to protocol v%s for kernel %s", protocol_version, self.kernel_id)
164 if not self._kernel_info_future.done():
164 if not self._kernel_info_future.done():
165 self._kernel_info_future.set_result(info)
165 self._kernel_info_future.set_result(info)
166
166
167 def initialize(self):
167 def initialize(self):
168 super(ZMQChannelsHandler, self).initialize()
168 super(ZMQChannelsHandler, self).initialize()
169 self.zmq_stream = None
169 self.zmq_stream = None
170 self.channels = {}
170 self.channels = {}
171 self.kernel_id = None
171 self.kernel_id = None
172 self.kernel_info_channel = None
172 self.kernel_info_channel = None
173 self._kernel_info_future = Future()
173 self._kernel_info_future = Future()
174
174
175 @gen.coroutine
175 @gen.coroutine
176 def pre_get(self):
176 def pre_get(self):
177 # authenticate first
177 # authenticate first
178 super(ZMQChannelsHandler, self).pre_get()
178 super(ZMQChannelsHandler, self).pre_get()
179 # then request kernel info, waiting up to a certain time before giving up.
179 # then request kernel info, waiting up to a certain time before giving up.
180 # We don't want to wait forever, because browsers don't take it well when
180 # We don't want to wait forever, because browsers don't take it well when
181 # servers never respond to websocket connection requests.
181 # servers never respond to websocket connection requests.
182 kernel = self.kernel_manager.get_kernel(self.kernel_id)
183 self.session.key = kernel.session.key
182 future = self.request_kernel_info()
184 future = self.request_kernel_info()
183
185
184 def give_up():
186 def give_up():
185 """Don't wait forever for the kernel to reply"""
187 """Don't wait forever for the kernel to reply"""
186 if future.done():
188 if future.done():
187 return
189 return
188 self.log.warn("Timeout waiting for kernel_info reply from %s", self.kernel_id)
190 self.log.warn("Timeout waiting for kernel_info reply from %s", self.kernel_id)
189 future.set_result({})
191 future.set_result({})
190 loop = IOLoop.current()
192 loop = IOLoop.current()
191 loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up)
193 loop.add_timeout(loop.time() + self.kernel_info_timeout, give_up)
192 # actually wait for it
194 # actually wait for it
193 yield future
195 yield future
194
196
195 @gen.coroutine
197 @gen.coroutine
196 def get(self, kernel_id):
198 def get(self, kernel_id):
197 self.kernel_id = cast_unicode(kernel_id, 'ascii')
199 self.kernel_id = cast_unicode(kernel_id, 'ascii')
198 yield super(ZMQChannelsHandler, self).get(kernel_id=kernel_id)
200 yield super(ZMQChannelsHandler, self).get(kernel_id=kernel_id)
199
201
200 def open(self, kernel_id):
202 def open(self, kernel_id):
201 super(ZMQChannelsHandler, self).open()
203 super(ZMQChannelsHandler, self).open()
202 try:
204 try:
203 self.create_stream()
205 self.create_stream()
204 except web.HTTPError as e:
206 except web.HTTPError as e:
205 self.log.error("Error opening stream: %s", e)
207 self.log.error("Error opening stream: %s", e)
206 # WebSockets don't response to traditional error codes so we
208 # WebSockets don't response to traditional error codes so we
207 # close the connection.
209 # close the connection.
208 for channel, stream in self.channels.items():
210 for channel, stream in self.channels.items():
209 if not stream.closed():
211 if not stream.closed():
210 stream.close()
212 stream.close()
211 self.close()
213 self.close()
212 else:
214 else:
213 for channel, stream in self.channels.items():
215 for channel, stream in self.channels.items():
214 stream.on_recv_stream(self._on_zmq_reply)
216 stream.on_recv_stream(self._on_zmq_reply)
215
217
216 def on_message(self, msg):
218 def on_message(self, msg):
217 if not self.channels:
219 if not self.channels:
218 # already closed, ignore the message
220 # already closed, ignore the message
219 self.log.debug("Received message on closed websocket %r", msg)
221 self.log.debug("Received message on closed websocket %r", msg)
220 return
222 return
221 if isinstance(msg, bytes):
223 if isinstance(msg, bytes):
222 msg = deserialize_binary_message(msg)
224 msg = deserialize_binary_message(msg)
223 else:
225 else:
224 msg = json.loads(msg)
226 msg = json.loads(msg)
225 channel = msg.pop('channel', None)
227 channel = msg.pop('channel', None)
226 if channel is None:
228 if channel is None:
227 self.log.warn("No channel specified, assuming shell: %s", msg)
229 self.log.warn("No channel specified, assuming shell: %s", msg)
228 channel = 'shell'
230 channel = 'shell'
229 if channel not in self.channels:
231 if channel not in self.channels:
230 self.log.warn("No such channel: %r", channel)
232 self.log.warn("No such channel: %r", channel)
231 return
233 return
232 stream = self.channels[channel]
234 stream = self.channels[channel]
233 self.session.send(stream, msg)
235 self.session.send(stream, msg)
234
236
235 def on_close(self):
237 def on_close(self):
236 km = self.kernel_manager
238 km = self.kernel_manager
237 if self.kernel_id in km:
239 if self.kernel_id in km:
238 km.remove_restart_callback(
240 km.remove_restart_callback(
239 self.kernel_id, self.on_kernel_restarted,
241 self.kernel_id, self.on_kernel_restarted,
240 )
242 )
241 km.remove_restart_callback(
243 km.remove_restart_callback(
242 self.kernel_id, self.on_restart_failed, 'dead',
244 self.kernel_id, self.on_restart_failed, 'dead',
243 )
245 )
244 # This method can be called twice, once by self.kernel_died and once
246 # This method can be called twice, once by self.kernel_died and once
245 # from the WebSocket close event. If the WebSocket connection is
247 # from the WebSocket close event. If the WebSocket connection is
246 # closed before the ZMQ streams are setup, they could be None.
248 # closed before the ZMQ streams are setup, they could be None.
247 for channel, stream in self.channels.items():
249 for channel, stream in self.channels.items():
248 if stream is not None and not stream.closed():
250 if stream is not None and not stream.closed():
249 stream.on_recv(None)
251 stream.on_recv(None)
250 # close the socket directly, don't wait for the stream
252 # close the socket directly, don't wait for the stream
251 socket = stream.socket
253 socket = stream.socket
252 stream.close()
254 stream.close()
253 socket.close()
255 socket.close()
254
256
255 self.channels = {}
257 self.channels = {}
256
258
257 def _send_status_message(self, status):
259 def _send_status_message(self, status):
258 msg = self.session.msg("status",
260 msg = self.session.msg("status",
259 {'execution_state': status}
261 {'execution_state': status}
260 )
262 )
261 msg['channel'] = 'iopub'
263 msg['channel'] = 'iopub'
262 self.write_message(json.dumps(msg, default=date_default))
264 self.write_message(json.dumps(msg, default=date_default))
263
265
264 def on_kernel_restarted(self):
266 def on_kernel_restarted(self):
265 logging.warn("kernel %s restarted", self.kernel_id)
267 logging.warn("kernel %s restarted", self.kernel_id)
266 self._send_status_message('restarting')
268 self._send_status_message('restarting')
267
269
268 def on_restart_failed(self):
270 def on_restart_failed(self):
269 logging.error("kernel %s restarted failed!", self.kernel_id)
271 logging.error("kernel %s restarted failed!", self.kernel_id)
270 self._send_status_message('dead')
272 self._send_status_message('dead')
271
273
272
274
273 #-----------------------------------------------------------------------------
275 #-----------------------------------------------------------------------------
274 # URL to handler mappings
276 # URL to handler mappings
275 #-----------------------------------------------------------------------------
277 #-----------------------------------------------------------------------------
276
278
277
279
278 _kernel_id_regex = r"(?P<kernel_id>\w+-\w+-\w+-\w+-\w+)"
280 _kernel_id_regex = r"(?P<kernel_id>\w+-\w+-\w+-\w+-\w+)"
279 _kernel_action_regex = r"(?P<action>restart|interrupt)"
281 _kernel_action_regex = r"(?P<action>restart|interrupt)"
280
282
281 default_handlers = [
283 default_handlers = [
282 (r"/api/kernels", MainKernelHandler),
284 (r"/api/kernels", MainKernelHandler),
283 (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler),
285 (r"/api/kernels/%s" % _kernel_id_regex, KernelHandler),
284 (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler),
286 (r"/api/kernels/%s/%s" % (_kernel_id_regex, _kernel_action_regex), KernelActionHandler),
285 (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler),
287 (r"/api/kernels/%s/channels" % _kernel_id_regex, ZMQChannelsHandler),
286 ]
288 ]
@@ -1,172 +1,172 b''
1 """An in-process kernel"""
1 """An in-process kernel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from contextlib import contextmanager
6 from contextlib import contextmanager
7 import logging
7 import logging
8 import sys
8 import sys
9
9
10 from IPython.core.interactiveshell import InteractiveShellABC
10 from IPython.core.interactiveshell import InteractiveShellABC
11 from IPython.utils.jsonutil import json_clean
11 from IPython.utils.jsonutil import json_clean
12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
12 from IPython.utils.traitlets import Any, Enum, Instance, List, Type
13 from IPython.kernel.zmq.ipkernel import IPythonKernel
13 from IPython.kernel.zmq.ipkernel import IPythonKernel
14 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
14 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
15
15
16 from .socket import DummySocket
16 from .socket import DummySocket
17
17
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 # Main kernel class
19 # Main kernel class
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21
21
22 class InProcessKernel(IPythonKernel):
22 class InProcessKernel(IPythonKernel):
23
23
24 #-------------------------------------------------------------------------
24 #-------------------------------------------------------------------------
25 # InProcessKernel interface
25 # InProcessKernel interface
26 #-------------------------------------------------------------------------
26 #-------------------------------------------------------------------------
27
27
28 # The frontends connected to this kernel.
28 # The frontends connected to this kernel.
29 frontends = List(
29 frontends = List(
30 Instance('IPython.kernel.inprocess.client.InProcessKernelClient')
30 Instance('IPython.kernel.inprocess.client.InProcessKernelClient')
31 )
31 )
32
32
33 # The GUI environment that the kernel is running under. This need not be
33 # The GUI environment that the kernel is running under. This need not be
34 # specified for the normal operation for the kernel, but is required for
34 # specified for the normal operation for the kernel, but is required for
35 # IPython's GUI support (including pylab). The default is 'inline' because
35 # IPython's GUI support (including pylab). The default is 'inline' because
36 # it is safe under all GUI toolkits.
36 # it is safe under all GUI toolkits.
37 gui = Enum(('tk', 'gtk', 'wx', 'qt', 'qt4', 'inline'),
37 gui = Enum(('tk', 'gtk', 'wx', 'qt', 'qt4', 'inline'),
38 default_value='inline')
38 default_value='inline')
39
39
40 raw_input_str = Any()
40 raw_input_str = Any()
41 stdout = Any()
41 stdout = Any()
42 stderr = Any()
42 stderr = Any()
43
43
44 #-------------------------------------------------------------------------
44 #-------------------------------------------------------------------------
45 # Kernel interface
45 # Kernel interface
46 #-------------------------------------------------------------------------
46 #-------------------------------------------------------------------------
47
47
48 shell_class = Type()
48 shell_class = Type()
49 shell_streams = List()
49 shell_streams = List()
50 control_stream = Any()
50 control_stream = Any()
51 iopub_socket = Instance(DummySocket, ())
51 iopub_socket = Instance(DummySocket, ())
52 stdin_socket = Instance(DummySocket, ())
52 stdin_socket = Instance(DummySocket, ())
53
53
54 def __init__(self, **traits):
54 def __init__(self, **traits):
55 # When an InteractiveShell is instantiated by our base class, it binds
55 # When an InteractiveShell is instantiated by our base class, it binds
56 # the current values of sys.stdout and sys.stderr.
56 # the current values of sys.stdout and sys.stderr.
57 with self._redirected_io():
57 with self._redirected_io():
58 super(InProcessKernel, self).__init__(**traits)
58 super(InProcessKernel, self).__init__(**traits)
59
59
60 self.iopub_socket.on_trait_change(self._io_dispatch, 'message_sent')
60 self.iopub_socket.on_trait_change(self._io_dispatch, 'message_sent')
61 self.shell.kernel = self
61 self.shell.kernel = self
62
62
63 def execute_request(self, stream, ident, parent):
63 def execute_request(self, stream, ident, parent):
64 """ Override for temporary IO redirection. """
64 """ Override for temporary IO redirection. """
65 with self._redirected_io():
65 with self._redirected_io():
66 super(InProcessKernel, self).execute_request(stream, ident, parent)
66 super(InProcessKernel, self).execute_request(stream, ident, parent)
67
67
68 def start(self):
68 def start(self):
69 """ Override registration of dispatchers for streams. """
69 """ Override registration of dispatchers for streams. """
70 self.shell.exit_now = False
70 self.shell.exit_now = False
71
71
72 def _abort_queue(self, stream):
72 def _abort_queue(self, stream):
73 """ The in-process kernel doesn't abort requests. """
73 """ The in-process kernel doesn't abort requests. """
74 pass
74 pass
75
75
76 def _input_request(self, prompt, ident, parent, password=False):
76 def _input_request(self, prompt, ident, parent, password=False):
77 # Flush output before making the request.
77 # Flush output before making the request.
78 self.raw_input_str = None
78 self.raw_input_str = None
79 sys.stderr.flush()
79 sys.stderr.flush()
80 sys.stdout.flush()
80 sys.stdout.flush()
81
81
82 # Send the input request.
82 # Send the input request.
83 content = json_clean(dict(prompt=prompt, password=password))
83 content = json_clean(dict(prompt=prompt, password=password))
84 msg = self.session.msg(u'input_request', content, parent)
84 msg = self.session.msg(u'input_request', content, parent)
85 for frontend in self.frontends:
85 for frontend in self.frontends:
86 if frontend.session.session == parent['header']['session']:
86 if frontend.session.session == parent['header']['session']:
87 frontend.stdin_channel.call_handlers(msg)
87 frontend.stdin_channel.call_handlers(msg)
88 break
88 break
89 else:
89 else:
90 logging.error('No frontend found for raw_input request')
90 logging.error('No frontend found for raw_input request')
91 return str()
91 return str()
92
92
93 # Await a response.
93 # Await a response.
94 while self.raw_input_str is None:
94 while self.raw_input_str is None:
95 frontend.stdin_channel.process_events()
95 frontend.stdin_channel.process_events()
96 return self.raw_input_str
96 return self.raw_input_str
97
97
98 #-------------------------------------------------------------------------
98 #-------------------------------------------------------------------------
99 # Protected interface
99 # Protected interface
100 #-------------------------------------------------------------------------
100 #-------------------------------------------------------------------------
101
101
102 @contextmanager
102 @contextmanager
103 def _redirected_io(self):
103 def _redirected_io(self):
104 """ Temporarily redirect IO to the kernel.
104 """ Temporarily redirect IO to the kernel.
105 """
105 """
106 sys_stdout, sys_stderr = sys.stdout, sys.stderr
106 sys_stdout, sys_stderr = sys.stdout, sys.stderr
107 sys.stdout, sys.stderr = self.stdout, self.stderr
107 sys.stdout, sys.stderr = self.stdout, self.stderr
108 yield
108 yield
109 sys.stdout, sys.stderr = sys_stdout, sys_stderr
109 sys.stdout, sys.stderr = sys_stdout, sys_stderr
110
110
111 #------ Trait change handlers --------------------------------------------
111 #------ Trait change handlers --------------------------------------------
112
112
113 def _io_dispatch(self):
113 def _io_dispatch(self):
114 """ Called when a message is sent to the IO socket.
114 """ Called when a message is sent to the IO socket.
115 """
115 """
116 ident, msg = self.session.recv(self.iopub_socket, copy=False)
116 ident, msg = self.session.recv(self.iopub_socket, copy=False)
117 for frontend in self.frontends:
117 for frontend in self.frontends:
118 frontend.iopub_channel.call_handlers(msg)
118 frontend.iopub_channel.call_handlers(msg)
119
119
120 #------ Trait initializers -----------------------------------------------
120 #------ Trait initializers -----------------------------------------------
121
121
122 def _log_default(self):
122 def _log_default(self):
123 return logging.getLogger(__name__)
123 return logging.getLogger(__name__)
124
124
    def _session_default(self):
        """Default Session for in-process messaging.

        The empty key disables HMAC message signing: traffic never leaves
        this process, so signing would be pure overhead.
        """
        from IPython.kernel.zmq.session import Session
        return Session(parent=self, key=b'')
128
128
    def _shell_class_default(self):
        # Use the in-process shell subclass defined later in this module.
        return InProcessInteractiveShell
131
131
    def _stdout_default(self):
        """Default stdout: an OutStream publishing on the iopub socket."""
        from IPython.kernel.zmq.iostream import OutStream
        # NOTE(review): pipe=False presumably skips OS-pipe forwarding --
        # confirm against the OutStream implementation.
        return OutStream(self.session, self.iopub_socket, u'stdout', pipe=False)
135
135
    def _stderr_default(self):
        """Default stderr: an OutStream publishing on the iopub socket."""
        from IPython.kernel.zmq.iostream import OutStream
        # NOTE(review): pipe=False presumably skips OS-pipe forwarding --
        # confirm against the OutStream implementation.
        return OutStream(self.session, self.iopub_socket, u'stderr', pipe=False)
139
139
140 #-----------------------------------------------------------------------------
140 #-----------------------------------------------------------------------------
141 # Interactive shell subclass
141 # Interactive shell subclass
142 #-----------------------------------------------------------------------------
142 #-----------------------------------------------------------------------------
143
143
class InProcessInteractiveShell(ZMQInteractiveShell):
    """Interactive shell variant used by the in-process kernel."""

    kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel')

    #-------------------------------------------------------------------------
    # InteractiveShell interface
    #-------------------------------------------------------------------------

    def enable_gui(self, gui=None):
        """Enable GUI integration for the kernel."""
        from IPython.kernel.zmq.eventloops import enable_gui
        # Fall back to the kernel's configured toolkit when none is given.
        gui = gui or self.kernel.gui
        return enable_gui(gui, kernel=self.kernel)

    def enable_matplotlib(self, gui=None):
        """Enable matplotlib integration for the kernel."""
        gui = gui or self.kernel.gui
        return super(InProcessInteractiveShell, self).enable_matplotlib(gui)

    def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
        """Activate pylab support at runtime."""
        gui = gui or self.kernel.gui
        return super(InProcessInteractiveShell, self).enable_pylab(
            gui, import_all, welcome_message)

InteractiveShellABC.register(InProcessInteractiveShell)
@@ -1,77 +1,71 b''
1 """A kernel manager for in-process kernels."""
1 """A kernel manager for in-process kernels."""
2
2
3 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Copyright (C) 2013 The IPython Development Team
4 # Distributed under the terms of the Modified BSD License.
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
9
10 #-----------------------------------------------------------------------------
11 # Imports
12 #-----------------------------------------------------------------------------
13
5
14 from IPython.utils.traitlets import Instance, DottedObjectName
6 from IPython.utils.traitlets import Instance, DottedObjectName
15 from IPython.kernel.managerabc import KernelManagerABC
7 from IPython.kernel.managerabc import KernelManagerABC
16 from IPython.kernel.manager import KernelManager
8 from IPython.kernel.manager import KernelManager
9 from IPython.kernel.zmq.session import Session
17
10
18 #-----------------------------------------------------------------------------
19 # Main kernel manager class
20 #-----------------------------------------------------------------------------
21
11
class InProcessKernelManager(KernelManager):
    """A manager for an in-process kernel.

    This class implements the interface of
    `IPython.kernel.kernelmanagerabc.KernelManagerABC` and allows
    (asynchronous) frontends to be used seamlessly with an in-process kernel.

    See `IPython.kernel.kernelmanager.KernelManager` for docstrings.
    """

    # The kernel process with which the KernelManager is communicating.
    kernel = Instance('IPython.kernel.inprocess.ipkernel.InProcessKernel')
    # the client class for KM.client() shortcut
    client_class = DottedObjectName('IPython.kernel.inprocess.BlockingInProcessKernelClient')

    def _session_default(self):
        # don't sign in-process messages: traffic never leaves this process,
        # so an HMAC key would add overhead without adding security
        return Session(key=b'', parent=self)

    #--------------------------------------------------------------------------
    # Kernel management methods
    #--------------------------------------------------------------------------

    def start_kernel(self, **kwds):
        """Create the in-process kernel, sharing this manager's session."""
        # Lazy import -- presumably to avoid a circular import at module
        # load time; TODO confirm.
        from IPython.kernel.inprocess.ipkernel import InProcessKernel
        self.kernel = InProcessKernel(parent=self, session=self.session)

    def shutdown_kernel(self):
        """Discard the kernel; there is no separate process to stop."""
        self._kill_kernel()

    def restart_kernel(self, now=False, **kwds):
        """Shut the kernel down and start a fresh one with the same options."""
        self.shutdown_kernel()
        self.start_kernel(**kwds)

    @property
    def has_kernel(self):
        # True while a kernel object is attached.
        return self.kernel is not None

    def _kill_kernel(self):
        # Dropping the reference is all that is needed in-process.
        self.kernel = None

    def interrupt_kernel(self):
        """Not supported: there is no separate process to interrupt."""
        raise NotImplementedError("Cannot interrupt in-process kernel.")

    def signal_kernel(self, signum):
        """Not supported: there is no separate process to signal."""
        raise NotImplementedError("Cannot signal in-process kernel.")

    def is_alive(self):
        """The in-process kernel is 'alive' as long as it exists."""
        return self.kernel is not None

    def client(self, **kwargs):
        """Build a client, passing the kernel object through to it."""
        kwargs['kernel'] = self.kernel
        return super(InProcessKernelManager, self).client(**kwargs)


#-----------------------------------------------------------------------------
# ABC Registration
#-----------------------------------------------------------------------------

KernelManagerABC.register(InProcessKernelManager)
@@ -1,69 +1,69 b''
1 # Copyright (c) IPython Development Team.
1 # Copyright (c) IPython Development Team.
2 # Distributed under the terms of the Modified BSD License.
2 # Distributed under the terms of the Modified BSD License.
3
3
4 from __future__ import print_function
4 from __future__ import print_function
5
5
6 import sys
6 import sys
7 import unittest
7 import unittest
8
8
9 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
10 from IPython.kernel.inprocess.manager import InProcessKernelManager
10 from IPython.kernel.inprocess.manager import InProcessKernelManager
11 from IPython.kernel.inprocess.ipkernel import InProcessKernel
11 from IPython.kernel.inprocess.ipkernel import InProcessKernel
12 from IPython.kernel.tests.utils import assemble_output
12 from IPython.kernel.tests.utils import assemble_output
13 from IPython.testing.decorators import skipif_not_matplotlib
13 from IPython.testing.decorators import skipif_not_matplotlib
14 from IPython.utils.io import capture_output
14 from IPython.utils.io import capture_output
15 from IPython.utils import py3compat
15 from IPython.utils import py3compat
16
16
17 if py3compat.PY3:
17 if py3compat.PY3:
18 from io import StringIO
18 from io import StringIO
19 else:
19 else:
20 from StringIO import StringIO
20 from StringIO import StringIO
21
21
22
22
class InProcessKernelTestCase(unittest.TestCase):
    """End-to-end tests of the in-process kernel through a blocking client."""

    def setUp(self):
        # Boot an in-process kernel and attach a ready blocking client.
        self.km = InProcessKernelManager()
        self.km.start_kernel()
        self.kc = self.km.client()
        self.kc.start_channels()
        self.kc.wait_for_ready()

    @skipif_not_matplotlib
    def test_pylab(self):
        """Does %pylab work in the in-process kernel?"""
        kc = self.kc
        kc.execute('%pylab')
        out, err = assemble_output(kc.iopub_channel)
        self.assertIn('matplotlib', out)

    def test_raw_input(self):
        """ Does the in-process kernel handle raw_input correctly?
        """
        # Substitute an in-memory stream for stdin so input() reads 'foobar',
        # restoring the real stdin afterwards.
        io = StringIO('foobar\n')
        sys_stdin = sys.stdin
        sys.stdin = io
        try:
            if py3compat.PY3:
                self.kc.execute('x = input()')
            else:
                self.kc.execute('x = raw_input()')
        finally:
            sys.stdin = sys_stdin
        self.assertEqual(self.km.kernel.shell.user_ns.get('x'), 'foobar')

    def test_stdout(self):
        """ Does the in-process kernel correctly capture IO?
        """
        # A kernel constructed directly (no manager) should still capture IO.
        kernel = InProcessKernel()

        with capture_output() as io:
            kernel.shell.run_cell('print("foo")')
        self.assertEqual(io.stdout, 'foo\n')

        # Output requested through a client is routed over the iopub channel.
        kc = BlockingInProcessKernelClient(kernel=kernel, session=kernel.session)
        kernel.frontends.append(kc)
        kc.execute('print("bar")')
        out, err = assemble_output(kc.iopub_channel)
        self.assertEqual(out, 'bar\n')
69
69
@@ -1,108 +1,108 b''
1 # Copyright (c) IPython Development Team.
1 # Copyright (c) IPython Development Team.
2 # Distributed under the terms of the Modified BSD License.
2 # Distributed under the terms of the Modified BSD License.
3
3
4 from __future__ import print_function
4 from __future__ import print_function
5
5
6 import unittest
6 import unittest
7
7
8 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
8 from IPython.kernel.inprocess.blocking import BlockingInProcessKernelClient
9 from IPython.kernel.inprocess.manager import InProcessKernelManager
9 from IPython.kernel.inprocess.manager import InProcessKernelManager
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Test case
12 # Test case
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
class InProcessKernelManagerTestCase(unittest.TestCase):
    """Exercise the public KernelManager interface of the in-process manager.

    Deprecated unittest aliases (``assert_``, ``assertEquals``,
    ``assertNotEquals``) are replaced with the canonical assertTrue /
    assertEqual / assertNotEqual methods (the aliases are removed in
    Python 3.12), and the repeated kernel/client boot sequence is factored
    into a helper.
    """

    def _start_manager_and_client(self):
        """Boot an in-process kernel; return (manager, ready blocking client)."""
        km = InProcessKernelManager()
        km.start_kernel()
        kc = km.client()
        kc.start_channels()
        kc.wait_for_ready()
        return km, kc

    def test_interface(self):
        """ Does the in-process kernel manager implement the basic KM interface?
        """
        km = InProcessKernelManager()
        self.assertTrue(not km.has_kernel)

        km.start_kernel()
        self.assertTrue(km.has_kernel)
        self.assertTrue(km.kernel is not None)

        kc = km.client()
        self.assertTrue(not kc.channels_running)

        kc.start_channels()
        self.assertTrue(kc.channels_running)

        old_kernel = km.kernel
        km.restart_kernel()
        self.assertIsNotNone(km.kernel)
        self.assertNotEqual(km.kernel, old_kernel)

        km.shutdown_kernel()
        self.assertTrue(not km.has_kernel)

        self.assertRaises(NotImplementedError, km.interrupt_kernel)
        self.assertRaises(NotImplementedError, km.signal_kernel, 9)

        kc.stop_channels()
        self.assertTrue(not kc.channels_running)

    def test_execute(self):
        """ Does executing code in an in-process kernel work?
        """
        km, kc = self._start_manager_and_client()
        kc.execute('foo = 1')
        self.assertEqual(km.kernel.shell.user_ns['foo'], 1)

    def test_complete(self):
        """ Does requesting completion from an in-process kernel work?
        """
        km, kc = self._start_manager_and_client()
        km.kernel.shell.push({'my_bar': 0, 'my_baz': 1})
        kc.complete('my_ba', 5)
        msg = kc.get_shell_msg()
        self.assertEqual(msg['header']['msg_type'], 'complete_reply')
        self.assertEqual(sorted(msg['content']['matches']),
                         ['my_bar', 'my_baz'])

    def test_inspect(self):
        """ Does requesting object information from an in-process kernel work?
        """
        km, kc = self._start_manager_and_client()
        km.kernel.shell.user_ns['foo'] = 1
        kc.inspect('foo')
        msg = kc.get_shell_msg()
        self.assertEqual(msg['header']['msg_type'], 'inspect_reply')
        content = msg['content']
        self.assertTrue(content['found'])
        text = content['data']['text/plain']
        self.assertIn('int', text)

    def test_history(self):
        """ Does requesting history from an in-process kernel work?
        """
        km, kc = self._start_manager_and_client()
        kc.execute('%who')
        kc.history(hist_access_type='tail', n=1)
        msg = kc.shell_channel.get_msgs()[-1]
        self.assertEqual(msg['header']['msg_type'], 'history_reply')
        history = msg['content']['history']
        self.assertEqual(len(history), 1)
        self.assertEqual(history[0][2], '%who')


if __name__ == '__main__':
    unittest.main()
@@ -1,388 +1,387 b''
1 """An Application for launching a kernel"""
1 """An Application for launching a kernel"""
2
2
3 # Copyright (c) IPython Development Team.
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
4 # Distributed under the terms of the Modified BSD License.
5
5
6 from __future__ import print_function
6 from __future__ import print_function
7
7
8 import atexit
8 import atexit
9 import os
9 import os
10 import sys
10 import sys
11 import signal
11 import signal
12
12
13 import zmq
13 import zmq
14 from zmq.eventloop import ioloop
14 from zmq.eventloop import ioloop
15 from zmq.eventloop.zmqstream import ZMQStream
15 from zmq.eventloop.zmqstream import ZMQStream
16
16
17 from IPython.core.ultratb import FormattedTB
17 from IPython.core.ultratb import FormattedTB
18 from IPython.core.application import (
18 from IPython.core.application import (
19 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
19 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
20 )
20 )
21 from IPython.core.profiledir import ProfileDir
21 from IPython.core.profiledir import ProfileDir
22 from IPython.core.shellapp import (
22 from IPython.core.shellapp import (
23 InteractiveShellApp, shell_flags, shell_aliases
23 InteractiveShellApp, shell_flags, shell_aliases
24 )
24 )
25 from IPython.utils import io
25 from IPython.utils import io
26 from IPython.utils.path import filefind
26 from IPython.utils.path import filefind
27 from IPython.utils.traitlets import (
27 from IPython.utils.traitlets import (
28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
28 Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type,
29 )
29 )
30 from IPython.utils.importstring import import_item
30 from IPython.utils.importstring import import_item
31 from IPython.kernel import write_connection_file
31 from IPython.kernel import write_connection_file
32 from IPython.kernel.connect import ConnectionFileMixin
32 from IPython.kernel.connect import ConnectionFileMixin
33
33
34 # local imports
34 # local imports
35 from .heartbeat import Heartbeat
35 from .heartbeat import Heartbeat
36 from .ipkernel import IPythonKernel
36 from .ipkernel import IPythonKernel
37 from .parentpoller import ParentPollerUnix, ParentPollerWindows
37 from .parentpoller import ParentPollerUnix, ParentPollerWindows
38 from .session import (
38 from .session import (
39 Session, session_flags, session_aliases, default_secure,
39 Session, session_flags, session_aliases,
40 )
40 )
41 from .zmqshell import ZMQInteractiveShell
41 from .zmqshell import ZMQInteractiveShell
42
42
43 #-----------------------------------------------------------------------------
43 #-----------------------------------------------------------------------------
44 # Flags and Aliases
44 # Flags and Aliases
45 #-----------------------------------------------------------------------------
45 #-----------------------------------------------------------------------------
46
46
# Command-line aliases: map short option names onto IPKernelApp traits.
kernel_aliases = dict(base_aliases)
kernel_aliases.update({
    'ip' : 'IPKernelApp.ip',
    'hb' : 'IPKernelApp.hb_port',
    'shell' : 'IPKernelApp.shell_port',
    'iopub' : 'IPKernelApp.iopub_port',
    'stdin' : 'IPKernelApp.stdin_port',
    'control' : 'IPKernelApp.control_port',
    'f' : 'IPKernelApp.connection_file',
    'transport': 'IPKernelApp.transport',
})

# Command-line flags: each entry is (config overrides, help text).
kernel_flags = dict(base_flags)
kernel_flags.update({
    'no-stdout' : (
            {'IPKernelApp' : {'no_stdout' : True}},
            "redirect stdout to the null device"),
    'no-stderr' : (
            {'IPKernelApp' : {'no_stderr' : True}},
            "redirect stderr to the null device"),
    'pylab' : (
        {'IPKernelApp' : {'pylab' : 'auto'}},
        """Pre-load matplotlib and numpy for interactive use with
    the default matplotlib backend."""),
})

# inherit flags&aliases for any IPython shell apps
kernel_aliases.update(shell_aliases)
kernel_flags.update(shell_flags)

# inherit flags&aliases for Sessions
kernel_aliases.update(session_aliases)
kernel_flags.update(session_flags)

# Banner printed when the kernel is launched from a terminal, explaining
# why Ctrl-C does not interrupt it.
_ctrl_c_message = """\
NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.

To exit, you will have to explicitly quit this process, by either sending
"quit" from a client, or using Ctrl-\\ in UNIX-like environments.

To read more about this, see https://github.com/ipython/ipython/issues/2049

"""
90
90
91 #-----------------------------------------------------------------------------
91 #-----------------------------------------------------------------------------
92 # Application class for starting an IPython Kernel
92 # Application class for starting an IPython Kernel
93 #-----------------------------------------------------------------------------
93 #-----------------------------------------------------------------------------
94
94
95 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
95 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
96 ConnectionFileMixin):
96 ConnectionFileMixin):
97 name='ipython-kernel'
97 name='ipython-kernel'
98 aliases = Dict(kernel_aliases)
98 aliases = Dict(kernel_aliases)
99 flags = Dict(kernel_flags)
99 flags = Dict(kernel_flags)
100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
100 classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
101 # the kernel class, as an importstring
101 # the kernel class, as an importstring
102 kernel_class = Type('IPython.kernel.zmq.ipkernel.IPythonKernel', config=True,
102 kernel_class = Type('IPython.kernel.zmq.ipkernel.IPythonKernel', config=True,
103 klass='IPython.kernel.zmq.kernelbase.Kernel',
103 klass='IPython.kernel.zmq.kernelbase.Kernel',
104 help="""The Kernel subclass to be used.
104 help="""The Kernel subclass to be used.
105
105
106 This should allow easy re-use of the IPKernelApp entry point
106 This should allow easy re-use of the IPKernelApp entry point
107 to configure and launch kernels other than IPython's own.
107 to configure and launch kernels other than IPython's own.
108 """)
108 """)
109 kernel = Any()
109 kernel = Any()
110 poller = Any() # don't restrict this even though current pollers are all Threads
110 poller = Any() # don't restrict this even though current pollers are all Threads
111 heartbeat = Instance(Heartbeat)
111 heartbeat = Instance(Heartbeat)
112 ports = Dict()
112 ports = Dict()
113
113
114 # connection info:
114 # connection info:
115
115
116 @property
116 @property
117 def abs_connection_file(self):
117 def abs_connection_file(self):
118 if os.path.basename(self.connection_file) == self.connection_file:
118 if os.path.basename(self.connection_file) == self.connection_file:
119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
119 return os.path.join(self.profile_dir.security_dir, self.connection_file)
120 else:
120 else:
121 return self.connection_file
121 return self.connection_file
122
122
123
123
124 # streams, etc.
124 # streams, etc.
125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
125 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
126 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
127 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
127 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
128 config=True, help="The importstring for the OutStream factory")
128 config=True, help="The importstring for the OutStream factory")
129 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
129 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
130 config=True, help="The importstring for the DisplayHook factory")
130 config=True, help="The importstring for the DisplayHook factory")
131
131
132 # polling
132 # polling
133 parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0), config=True,
133 parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0), config=True,
134 help="""kill this process if its parent dies. On Windows, the argument
134 help="""kill this process if its parent dies. On Windows, the argument
135 specifies the HANDLE of the parent process, otherwise it is simply boolean.
135 specifies the HANDLE of the parent process, otherwise it is simply boolean.
136 """)
136 """)
137 interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0), config=True,
137 interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0), config=True,
138 help="""ONLY USED ON WINDOWS
138 help="""ONLY USED ON WINDOWS
139 Interrupt this process when the parent is signaled.
139 Interrupt this process when the parent is signaled.
140 """)
140 """)
141
141
142 def init_crash_handler(self):
142 def init_crash_handler(self):
143 # Install minimal exception handling
143 # Install minimal exception handling
144 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
144 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
145 ostream=sys.__stdout__)
145 ostream=sys.__stdout__)
146
146
147 def init_poller(self):
147 def init_poller(self):
148 if sys.platform == 'win32':
148 if sys.platform == 'win32':
149 if self.interrupt or self.parent_handle:
149 if self.interrupt or self.parent_handle:
150 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
150 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
151 elif self.parent_handle:
151 elif self.parent_handle:
152 self.poller = ParentPollerUnix()
152 self.poller = ParentPollerUnix()
153
153
154 def _bind_socket(self, s, port):
154 def _bind_socket(self, s, port):
155 iface = '%s://%s' % (self.transport, self.ip)
155 iface = '%s://%s' % (self.transport, self.ip)
156 if self.transport == 'tcp':
156 if self.transport == 'tcp':
157 if port <= 0:
157 if port <= 0:
158 port = s.bind_to_random_port(iface)
158 port = s.bind_to_random_port(iface)
159 else:
159 else:
160 s.bind("tcp://%s:%i" % (self.ip, port))
160 s.bind("tcp://%s:%i" % (self.ip, port))
161 elif self.transport == 'ipc':
161 elif self.transport == 'ipc':
162 if port <= 0:
162 if port <= 0:
163 port = 1
163 port = 1
164 path = "%s-%i" % (self.ip, port)
164 path = "%s-%i" % (self.ip, port)
165 while os.path.exists(path):
165 while os.path.exists(path):
166 port = port + 1
166 port = port + 1
167 path = "%s-%i" % (self.ip, port)
167 path = "%s-%i" % (self.ip, port)
168 else:
168 else:
169 path = "%s-%i" % (self.ip, port)
169 path = "%s-%i" % (self.ip, port)
170 s.bind("ipc://%s" % path)
170 s.bind("ipc://%s" % path)
171 return port
171 return port
172
172
173 def write_connection_file(self):
173 def write_connection_file(self):
174 """write connection info to JSON file"""
174 """write connection info to JSON file"""
175 cf = self.abs_connection_file
175 cf = self.abs_connection_file
176 self.log.debug("Writing connection file: %s", cf)
176 self.log.debug("Writing connection file: %s", cf)
177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
177 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
178 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
179 iopub_port=self.iopub_port, control_port=self.control_port)
179 iopub_port=self.iopub_port, control_port=self.control_port)
180
180
181 def cleanup_connection_file(self):
181 def cleanup_connection_file(self):
182 cf = self.abs_connection_file
182 cf = self.abs_connection_file
183 self.log.debug("Cleaning up connection file: %s", cf)
183 self.log.debug("Cleaning up connection file: %s", cf)
184 try:
184 try:
185 os.remove(cf)
185 os.remove(cf)
186 except (IOError, OSError):
186 except (IOError, OSError):
187 pass
187 pass
188
188
189 self.cleanup_ipc_files()
189 self.cleanup_ipc_files()
190
190
191 def init_connection_file(self):
191 def init_connection_file(self):
192 if not self.connection_file:
192 if not self.connection_file:
193 self.connection_file = "kernel-%s.json"%os.getpid()
193 self.connection_file = "kernel-%s.json"%os.getpid()
194 try:
194 try:
195 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
195 self.connection_file = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
196 except IOError:
196 except IOError:
197 self.log.debug("Connection file not found: %s", self.connection_file)
197 self.log.debug("Connection file not found: %s", self.connection_file)
198 # This means I own it, so I will clean it up:
198 # This means I own it, so I will clean it up:
199 atexit.register(self.cleanup_connection_file)
199 atexit.register(self.cleanup_connection_file)
200 return
200 return
201 try:
201 try:
202 self.load_connection_file()
202 self.load_connection_file()
203 except Exception:
203 except Exception:
204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
204 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
205 self.exit(1)
205 self.exit(1)
206
206
207 def init_sockets(self):
207 def init_sockets(self):
208 # Create a context, a session, and the kernel sockets.
208 # Create a context, a session, and the kernel sockets.
209 self.log.info("Starting the kernel at pid: %i", os.getpid())
209 self.log.info("Starting the kernel at pid: %i", os.getpid())
210 context = zmq.Context.instance()
210 context = zmq.Context.instance()
211 # Uncomment this to try closing the context.
211 # Uncomment this to try closing the context.
212 # atexit.register(context.term)
212 # atexit.register(context.term)
213
213
214 self.shell_socket = context.socket(zmq.ROUTER)
214 self.shell_socket = context.socket(zmq.ROUTER)
215 self.shell_socket.linger = 1000
215 self.shell_socket.linger = 1000
216 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
216 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
217 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
217 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
218
218
219 self.iopub_socket = context.socket(zmq.PUB)
219 self.iopub_socket = context.socket(zmq.PUB)
220 self.iopub_socket.linger = 1000
220 self.iopub_socket.linger = 1000
221 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
221 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
222 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
222 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
223
223
224 self.stdin_socket = context.socket(zmq.ROUTER)
224 self.stdin_socket = context.socket(zmq.ROUTER)
225 self.stdin_socket.linger = 1000
225 self.stdin_socket.linger = 1000
226 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
226 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
227 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
227 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
228
228
229 self.control_socket = context.socket(zmq.ROUTER)
229 self.control_socket = context.socket(zmq.ROUTER)
230 self.control_socket.linger = 1000
230 self.control_socket.linger = 1000
231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
231 self.control_port = self._bind_socket(self.control_socket, self.control_port)
232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
232 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
233
233
234 def init_heartbeat(self):
234 def init_heartbeat(self):
235 """start the heart beating"""
235 """start the heart beating"""
236 # heartbeat doesn't share context, because it mustn't be blocked
236 # heartbeat doesn't share context, because it mustn't be blocked
237 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
237 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
238 hb_ctx = zmq.Context()
238 hb_ctx = zmq.Context()
239 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
239 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
240 self.hb_port = self.heartbeat.port
240 self.hb_port = self.heartbeat.port
241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
241 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
242 self.heartbeat.start()
242 self.heartbeat.start()
243
243
244 def log_connection_info(self):
244 def log_connection_info(self):
245 """display connection info, and store ports"""
245 """display connection info, and store ports"""
246 basename = os.path.basename(self.connection_file)
246 basename = os.path.basename(self.connection_file)
247 if basename == self.connection_file or \
247 if basename == self.connection_file or \
248 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
248 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
249 # use shortname
249 # use shortname
250 tail = basename
250 tail = basename
251 if self.profile != 'default':
251 if self.profile != 'default':
252 tail += " --profile %s" % self.profile
252 tail += " --profile %s" % self.profile
253 else:
253 else:
254 tail = self.connection_file
254 tail = self.connection_file
255 lines = [
255 lines = [
256 "To connect another client to this kernel, use:",
256 "To connect another client to this kernel, use:",
257 " --existing %s" % tail,
257 " --existing %s" % tail,
258 ]
258 ]
259 # log connection info
259 # log connection info
260 # info-level, so often not shown.
260 # info-level, so often not shown.
261 # frontends should use the %connect_info magic
261 # frontends should use the %connect_info magic
262 # to see the connection info
262 # to see the connection info
263 for line in lines:
263 for line in lines:
264 self.log.info(line)
264 self.log.info(line)
265 # also raw print to the terminal if no parent_handle (`ipython kernel`)
265 # also raw print to the terminal if no parent_handle (`ipython kernel`)
266 if not self.parent_handle:
266 if not self.parent_handle:
267 io.rprint(_ctrl_c_message)
267 io.rprint(_ctrl_c_message)
268 for line in lines:
268 for line in lines:
269 io.rprint(line)
269 io.rprint(line)
270
270
271 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
271 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
272 stdin=self.stdin_port, hb=self.hb_port,
272 stdin=self.stdin_port, hb=self.hb_port,
273 control=self.control_port)
273 control=self.control_port)
274
274
275 def init_blackhole(self):
275 def init_blackhole(self):
276 """redirects stdout/stderr to devnull if necessary"""
276 """redirects stdout/stderr to devnull if necessary"""
277 if self.no_stdout or self.no_stderr:
277 if self.no_stdout or self.no_stderr:
278 blackhole = open(os.devnull, 'w')
278 blackhole = open(os.devnull, 'w')
279 if self.no_stdout:
279 if self.no_stdout:
280 sys.stdout = sys.__stdout__ = blackhole
280 sys.stdout = sys.__stdout__ = blackhole
281 if self.no_stderr:
281 if self.no_stderr:
282 sys.stderr = sys.__stderr__ = blackhole
282 sys.stderr = sys.__stderr__ = blackhole
283
283
284 def init_io(self):
284 def init_io(self):
285 """Redirect input streams and set a display hook."""
285 """Redirect input streams and set a display hook."""
286 if self.outstream_class:
286 if self.outstream_class:
287 outstream_factory = import_item(str(self.outstream_class))
287 outstream_factory = import_item(str(self.outstream_class))
288 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
288 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
289 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
289 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
290 if self.displayhook_class:
290 if self.displayhook_class:
291 displayhook_factory = import_item(str(self.displayhook_class))
291 displayhook_factory = import_item(str(self.displayhook_class))
292 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
292 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
293
293
294 def init_signal(self):
294 def init_signal(self):
295 signal.signal(signal.SIGINT, signal.SIG_IGN)
295 signal.signal(signal.SIGINT, signal.SIG_IGN)
296
296
297 def init_kernel(self):
297 def init_kernel(self):
298 """Create the Kernel object itself"""
298 """Create the Kernel object itself"""
299 shell_stream = ZMQStream(self.shell_socket)
299 shell_stream = ZMQStream(self.shell_socket)
300 control_stream = ZMQStream(self.control_socket)
300 control_stream = ZMQStream(self.control_socket)
301
301
302 kernel_factory = self.kernel_class.instance
302 kernel_factory = self.kernel_class.instance
303
303
304 kernel = kernel_factory(parent=self, session=self.session,
304 kernel = kernel_factory(parent=self, session=self.session,
305 shell_streams=[shell_stream, control_stream],
305 shell_streams=[shell_stream, control_stream],
306 iopub_socket=self.iopub_socket,
306 iopub_socket=self.iopub_socket,
307 stdin_socket=self.stdin_socket,
307 stdin_socket=self.stdin_socket,
308 log=self.log,
308 log=self.log,
309 profile_dir=self.profile_dir,
309 profile_dir=self.profile_dir,
310 user_ns=self.user_ns,
310 user_ns=self.user_ns,
311 )
311 )
312 kernel.record_ports(self.ports)
312 kernel.record_ports(self.ports)
313 self.kernel = kernel
313 self.kernel = kernel
314
314
315 def init_gui_pylab(self):
315 def init_gui_pylab(self):
316 """Enable GUI event loop integration, taking pylab into account."""
316 """Enable GUI event loop integration, taking pylab into account."""
317
317
318 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
318 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
319 # to ensure that any exception is printed straight to stderr.
319 # to ensure that any exception is printed straight to stderr.
320 # Normally _showtraceback associates the reply with an execution,
320 # Normally _showtraceback associates the reply with an execution,
321 # which means frontends will never draw it, as this exception
321 # which means frontends will never draw it, as this exception
322 # is not associated with any execute request.
322 # is not associated with any execute request.
323
323
324 shell = self.shell
324 shell = self.shell
325 _showtraceback = shell._showtraceback
325 _showtraceback = shell._showtraceback
326 try:
326 try:
327 # replace error-sending traceback with stderr
327 # replace error-sending traceback with stderr
328 def print_tb(etype, evalue, stb):
328 def print_tb(etype, evalue, stb):
329 print ("GUI event loop or pylab initialization failed",
329 print ("GUI event loop or pylab initialization failed",
330 file=io.stderr)
330 file=io.stderr)
331 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
331 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
332 shell._showtraceback = print_tb
332 shell._showtraceback = print_tb
333 InteractiveShellApp.init_gui_pylab(self)
333 InteractiveShellApp.init_gui_pylab(self)
334 finally:
334 finally:
335 shell._showtraceback = _showtraceback
335 shell._showtraceback = _showtraceback
336
336
337 def init_shell(self):
337 def init_shell(self):
338 self.shell = getattr(self.kernel, 'shell', None)
338 self.shell = getattr(self.kernel, 'shell', None)
339 if self.shell:
339 if self.shell:
340 self.shell.configurables.append(self)
340 self.shell.configurables.append(self)
341
341
342 @catch_config_error
342 @catch_config_error
343 def initialize(self, argv=None):
343 def initialize(self, argv=None):
344 super(IPKernelApp, self).initialize(argv)
344 super(IPKernelApp, self).initialize(argv)
345 default_secure(self.config)
346 self.init_blackhole()
345 self.init_blackhole()
347 self.init_connection_file()
346 self.init_connection_file()
348 self.init_poller()
347 self.init_poller()
349 self.init_sockets()
348 self.init_sockets()
350 self.init_heartbeat()
349 self.init_heartbeat()
351 # writing/displaying connection info must be *after* init_sockets/heartbeat
350 # writing/displaying connection info must be *after* init_sockets/heartbeat
352 self.log_connection_info()
351 self.log_connection_info()
353 self.write_connection_file()
352 self.write_connection_file()
354 self.init_io()
353 self.init_io()
355 self.init_signal()
354 self.init_signal()
356 self.init_kernel()
355 self.init_kernel()
357 # shell init steps
356 # shell init steps
358 self.init_path()
357 self.init_path()
359 self.init_shell()
358 self.init_shell()
360 if self.shell:
359 if self.shell:
361 self.init_gui_pylab()
360 self.init_gui_pylab()
362 self.init_extensions()
361 self.init_extensions()
363 self.init_code()
362 self.init_code()
364 # flush stdout/stderr, so that anything written to these streams during
363 # flush stdout/stderr, so that anything written to these streams during
365 # initialization do not get associated with the first execution request
364 # initialization do not get associated with the first execution request
366 sys.stdout.flush()
365 sys.stdout.flush()
367 sys.stderr.flush()
366 sys.stderr.flush()
368
367
369 def start(self):
368 def start(self):
370 if self.poller is not None:
369 if self.poller is not None:
371 self.poller.start()
370 self.poller.start()
372 self.kernel.start()
371 self.kernel.start()
373 try:
372 try:
374 ioloop.IOLoop.instance().start()
373 ioloop.IOLoop.instance().start()
375 except KeyboardInterrupt:
374 except KeyboardInterrupt:
376 pass
375 pass
377
376
# alias kept for entry points / backwards compatibility
launch_new_instance = IPKernelApp.launch_instance


def main():
    """Run an IPKernel as an application"""
    app = IPKernelApp.instance()
    app.initialize()
    app.start()


if __name__ == '__main__':
    main()
@@ -1,873 +1,877 b''
1 """Session object for building, serializing, sending, and receiving messages in
1 """Session object for building, serializing, sending, and receiving messages in
2 IPython. The Session object supports serialization, HMAC signatures, and
2 IPython. The Session object supports serialization, HMAC signatures, and
3 metadata on messages.
3 metadata on messages.
4
4
5 Also defined here are utilities for working with Sessions:
5 Also defined here are utilities for working with Sessions:
6 * A SessionFactory to be used as a base class for configurables that work with
6 * A SessionFactory to be used as a base class for configurables that work with
7 Sessions.
7 Sessions.
8 * A Message object for convenience that allows attribute-access to the msg dict.
8 * A Message object for convenience that allows attribute-access to the msg dict.
9 """
9 """
10
10
11 # Copyright (c) IPython Development Team.
11 # Copyright (c) IPython Development Team.
12 # Distributed under the terms of the Modified BSD License.
12 # Distributed under the terms of the Modified BSD License.
13
13
14 import hashlib
14 import hashlib
15 import hmac
15 import hmac
16 import logging
16 import logging
17 import os
17 import os
18 import pprint
18 import pprint
19 import random
19 import random
20 import uuid
20 import uuid
21 import warnings
21 import warnings
22 from datetime import datetime
22 from datetime import datetime
23
23
24 try:
24 try:
25 import cPickle
25 import cPickle
26 pickle = cPickle
26 pickle = cPickle
27 except:
27 except:
28 cPickle = None
28 cPickle = None
29 import pickle
29 import pickle
30
30
31 try:
31 try:
32 # We are using compare_digest to limit the surface of timing attacks
32 # We are using compare_digest to limit the surface of timing attacks
33 from hmac import compare_digest
33 from hmac import compare_digest
34 except ImportError:
34 except ImportError:
35 # Python < 2.7.7: When digests don't match no feedback is provided,
35 # Python < 2.7.7: When digests don't match no feedback is provided,
36 # limiting the surface of attack
36 # limiting the surface of attack
37 def compare_digest(a,b): return a == b
37 def compare_digest(a,b): return a == b
38
38
39 import zmq
39 import zmq
40 from zmq.utils import jsonapi
40 from zmq.utils import jsonapi
41 from zmq.eventloop.ioloop import IOLoop
41 from zmq.eventloop.ioloop import IOLoop
42 from zmq.eventloop.zmqstream import ZMQStream
42 from zmq.eventloop.zmqstream import ZMQStream
43
43
44 from IPython.core.release import kernel_protocol_version
44 from IPython.core.release import kernel_protocol_version
45 from IPython.config.configurable import Configurable, LoggingConfigurable
45 from IPython.config.configurable import Configurable, LoggingConfigurable
46 from IPython.utils import io
46 from IPython.utils import io
47 from IPython.utils.importstring import import_item
47 from IPython.utils.importstring import import_item
48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
48 from IPython.utils.jsonutil import extract_dates, squash_dates, date_default
49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
49 from IPython.utils.py3compat import (str_to_bytes, str_to_unicode, unicode_type,
50 iteritems)
50 iteritems)
51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
51 from IPython.utils.traitlets import (CBytes, Unicode, Bool, Any, Instance, Set,
52 DottedObjectName, CUnicode, Dict, Integer,
52 DottedObjectName, CUnicode, Dict, Integer,
53 TraitError,
53 TraitError,
54 )
54 )
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
55 from IPython.utils.pickleutil import PICKLE_PROTOCOL
56 from IPython.kernel.adapter import adapt
56 from IPython.kernel.adapter import adapt
57 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
57 from IPython.kernel.zmq.serialize import MAX_ITEMS, MAX_BYTES
58
58
59 #-----------------------------------------------------------------------------
59 #-----------------------------------------------------------------------------
60 # utility functions
60 # utility functions
61 #-----------------------------------------------------------------------------
61 #-----------------------------------------------------------------------------
62
62
def squash_unicode(obj):
    """Coerce unicode back to bytestrings, recursively.

    Dicts and lists are modified in place; unicode dict keys are
    re-inserted as their utf8-encoded bytes equivalents.
    """
    if isinstance(obj, dict):
        for key in obj.keys():
            obj[key] = squash_unicode(obj[key])
            if isinstance(key, unicode_type):
                # re-key with the bytes form of the key
                obj[squash_unicode(key)] = obj.pop(key)
    elif isinstance(obj, list):
        for idx, item in enumerate(obj):
            obj[idx] = squash_unicode(item)
    elif isinstance(obj, unicode_type):
        obj = obj.encode('utf8')
    return obj
76
76
77 #-----------------------------------------------------------------------------
77 #-----------------------------------------------------------------------------
78 # globals and defaults
78 # globals and defaults
79 #-----------------------------------------------------------------------------
79 #-----------------------------------------------------------------------------
80
80
# Default (de)serializers and protocol constants.
# ISO8601-ify datetime objects
# allow unicode
# disallow nan, because it's not actually valid JSON
json_packer = lambda obj: jsonapi.dumps(obj, default=date_default,
                                        ensure_ascii=False, allow_nan=False)
json_unpacker = lambda s: jsonapi.loads(s)

pickle_packer = lambda o: pickle.dumps(squash_dates(o), PICKLE_PROTOCOL)
pickle_unpacker = pickle.loads

default_packer = json_packer
default_unpacker = json_unpacker

# frame delimiter separating zmq identities from message content
DELIM = b"<IDS|MSG>"
# singleton dummy tracker, which will always report as done
DONE = zmq.MessageTracker()
98
98
99 #-----------------------------------------------------------------------------
99 #-----------------------------------------------------------------------------
100 # Mixin tools for apps that use Sessions
100 # Mixin tools for apps that use Sessions
101 #-----------------------------------------------------------------------------
101 #-----------------------------------------------------------------------------
102
102
# command-line aliases and flags for apps that mix in a Session
session_aliases = dict(
    ident = 'Session.session',
    user = 'Session.username',
    keyfile = 'Session.keyfile',
)

session_flags = {
    'secure' : ({'Session' : { 'key' : str_to_bytes(str(uuid.uuid4())),
                            'keyfile' : '' }},
        """Use HMAC digests for authentication of messages.
        Setting this flag will generate a new UUID to use as the HMAC key.
        """),
    'no-secure' : ({'Session' : { 'key' : b'', 'keyfile' : '' }},
        """Don't authenticate messages."""),
}
118
118
def default_secure(cfg):
    """Set the default behavior for a config environment to be secure.

    If Session.key/keyfile have not been set, set Session.key to
    a new random UUID.

    .. deprecated::
        Sessions are secure by default; this helper only warns now.
    """
    warnings.warn("default_secure is deprecated", DeprecationWarning)
    if 'Session' in cfg:
        if 'key' in cfg.Session or 'keyfile' in cfg.Session:
            # explicit key or keyfile wins -- leave config untouched
            return
    # key/keyfile not specified, generate new UUID:
    cfg.Session.key = str_to_bytes(str(uuid.uuid4()))
131
131
132
132
133 #-----------------------------------------------------------------------------
133 #-----------------------------------------------------------------------------
134 # Classes
134 # Classes
135 #-----------------------------------------------------------------------------
135 #-----------------------------------------------------------------------------
136
136
class SessionFactory(LoggingConfigurable):
    """The Base class for configurables that have a Session, Context, logger,
    and IOLoop.
    """

    logname = Unicode('')
    def _logname_changed(self, name, old, new):
        # keep our logger in sync with the configured log name
        self.log = logging.getLogger(new)

    # not configurable:
    context = Instance('zmq.Context')
    def _context_default(self):
        return zmq.Context.instance()

    session = Instance('IPython.kernel.zmq.session.Session')

    loop = Instance('zmq.eventloop.ioloop.IOLoop', allow_none=False)
    def _loop_default(self):
        return IOLoop.instance()

    def __init__(self, **kwargs):
        super(SessionFactory, self).__init__(**kwargs)
        if self.session is None:
            # no session supplied or configured -- construct one ourselves
            self.session = Session(**kwargs)
163
163
164
164
class Message(object):
    """A simple message object that maps dict keys to attributes.

    A Message can be created from a dict and a dict from a Message instance
    simply by calling dict(msg_obj)."""

    def __init__(self, msg_dict):
        attrs = self.__dict__
        for key, value in iteritems(dict(msg_dict)):
            # nested dicts become nested Messages
            if isinstance(value, dict):
                value = Message(value)
            attrs[key] = value

    def __iter__(self):
        # Having this iterator lets dict(msg_obj) work out of the box.
        return iter(iteritems(self.__dict__))

    def __repr__(self):
        return repr(self.__dict__)

    def __str__(self):
        return pprint.pformat(self.__dict__)

    def __contains__(self, k):
        return k in self.__dict__

    def __getitem__(self, k):
        return self.__dict__[k]
193
193
194
194
def msg_header(msg_id, msg_type, username, session):
    """Build a message header dict, stamping it with the current time
    and the kernel protocol version."""
    return dict(
        msg_id=msg_id,
        msg_type=msg_type,
        username=username,
        session=session,
        date=datetime.now(),
        version=kernel_protocol_version,
    )
199
199
def extract_header(msg_or_header):
    """Given a message or header, return the header.

    Accepts a full message dict (uses its 'header' entry), a bare header
    (detected by the presence of 'msg_id'), or a falsy value (returns {}).
    Raises KeyError when the argument is neither.
    """
    if not msg_or_header:
        return {}
    try:
        # See if msg_or_header is the entire message.
        header = msg_or_header['header']
    except KeyError:
        try:
            # See if msg_or_header is just the header
            msg_or_header['msg_id']
        except KeyError:
            raise
        else:
            header = msg_or_header
    if not isinstance(header, dict):
        header = dict(header)
    return header
218
218
219 class Session(Configurable):
219 class Session(Configurable):
220 """Object for handling serialization and sending of messages.
220 """Object for handling serialization and sending of messages.
221
221
222 The Session object handles building messages and sending them
222 The Session object handles building messages and sending them
223 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
223 with ZMQ sockets or ZMQStream objects. Objects can communicate with each
224 other over the network via Session objects, and only need to work with the
224 other over the network via Session objects, and only need to work with the
225 dict-based IPython message spec. The Session will handle
225 dict-based IPython message spec. The Session will handle
226 serialization/deserialization, security, and metadata.
226 serialization/deserialization, security, and metadata.
227
227
228 Sessions support configurable serialization via packer/unpacker traits,
228 Sessions support configurable serialization via packer/unpacker traits,
229 and signing with HMAC digests via the key/keyfile traits.
229 and signing with HMAC digests via the key/keyfile traits.
230
230
231 Parameters
231 Parameters
232 ----------
232 ----------
233
233
234 debug : bool
234 debug : bool
235 whether to trigger extra debugging statements
235 whether to trigger extra debugging statements
236 packer/unpacker : str : 'json', 'pickle' or import_string
236 packer/unpacker : str : 'json', 'pickle' or import_string
237 importstrings for methods to serialize message parts. If just
237 importstrings for methods to serialize message parts. If just
238 'json' or 'pickle', predefined JSON and pickle packers will be used.
238 'json' or 'pickle', predefined JSON and pickle packers will be used.
239 Otherwise, the entire importstring must be used.
239 Otherwise, the entire importstring must be used.
240
240
241 The functions must accept at least valid JSON input, and output *bytes*.
241 The functions must accept at least valid JSON input, and output *bytes*.
242
242
243 For example, to use msgpack:
243 For example, to use msgpack:
244 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
244 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
245 pack/unpack : callables
245 pack/unpack : callables
246 You can also set the pack/unpack callables for serialization directly.
246 You can also set the pack/unpack callables for serialization directly.
247 session : bytes
247 session : bytes
248 the ID of this Session object. The default is to generate a new UUID.
248 the ID of this Session object. The default is to generate a new UUID.
249 username : unicode
249 username : unicode
250 username added to message headers. The default is to ask the OS.
250 username added to message headers. The default is to ask the OS.
251 key : bytes
251 key : bytes
252 The key used to initialize an HMAC signature. If unset, messages
252 The key used to initialize an HMAC signature. If unset, messages
253 will not be signed or checked.
253 will not be signed or checked.
254 keyfile : filepath
254 keyfile : filepath
255 The file containing a key. If this is set, `key` will be initialized
255 The file containing a key. If this is set, `key` will be initialized
256 to the contents of the file.
256 to the contents of the file.
257
257
258 """
258 """
259
259
260 debug=Bool(False, config=True, help="""Debug output in the Session""")
260 debug=Bool(False, config=True, help="""Debug output in the Session""")
261
261
262 packer = DottedObjectName('json',config=True,
262 packer = DottedObjectName('json',config=True,
263 help="""The name of the packer for serializing messages.
263 help="""The name of the packer for serializing messages.
264 Should be one of 'json', 'pickle', or an import name
264 Should be one of 'json', 'pickle', or an import name
265 for a custom callable serializer.""")
265 for a custom callable serializer.""")
266 def _packer_changed(self, name, old, new):
266 def _packer_changed(self, name, old, new):
267 if new.lower() == 'json':
267 if new.lower() == 'json':
268 self.pack = json_packer
268 self.pack = json_packer
269 self.unpack = json_unpacker
269 self.unpack = json_unpacker
270 self.unpacker = new
270 self.unpacker = new
271 elif new.lower() == 'pickle':
271 elif new.lower() == 'pickle':
272 self.pack = pickle_packer
272 self.pack = pickle_packer
273 self.unpack = pickle_unpacker
273 self.unpack = pickle_unpacker
274 self.unpacker = new
274 self.unpacker = new
275 else:
275 else:
276 self.pack = import_item(str(new))
276 self.pack = import_item(str(new))
277
277
278 unpacker = DottedObjectName('json', config=True,
278 unpacker = DottedObjectName('json', config=True,
279 help="""The name of the unpacker for unserializing messages.
279 help="""The name of the unpacker for unserializing messages.
280 Only used with custom functions for `packer`.""")
280 Only used with custom functions for `packer`.""")
281 def _unpacker_changed(self, name, old, new):
281 def _unpacker_changed(self, name, old, new):
282 if new.lower() == 'json':
282 if new.lower() == 'json':
283 self.pack = json_packer
283 self.pack = json_packer
284 self.unpack = json_unpacker
284 self.unpack = json_unpacker
285 self.packer = new
285 self.packer = new
286 elif new.lower() == 'pickle':
286 elif new.lower() == 'pickle':
287 self.pack = pickle_packer
287 self.pack = pickle_packer
288 self.unpack = pickle_unpacker
288 self.unpack = pickle_unpacker
289 self.packer = new
289 self.packer = new
290 else:
290 else:
291 self.unpack = import_item(str(new))
291 self.unpack = import_item(str(new))
292
292
293 session = CUnicode(u'', config=True,
293 session = CUnicode(u'', config=True,
294 help="""The UUID identifying this session.""")
294 help="""The UUID identifying this session.""")
295 def _session_default(self):
295 def _session_default(self):
296 u = unicode_type(uuid.uuid4())
296 u = unicode_type(uuid.uuid4())
297 self.bsession = u.encode('ascii')
297 self.bsession = u.encode('ascii')
298 return u
298 return u
299
299
300 def _session_changed(self, name, old, new):
300 def _session_changed(self, name, old, new):
301 self.bsession = self.session.encode('ascii')
301 self.bsession = self.session.encode('ascii')
302
302
303 # bsession is the session as bytes
303 # bsession is the session as bytes
304 bsession = CBytes(b'')
304 bsession = CBytes(b'')
305
305
306 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
306 username = Unicode(str_to_unicode(os.environ.get('USER', 'username')),
307 help="""Username for the Session. Default is your system username.""",
307 help="""Username for the Session. Default is your system username.""",
308 config=True)
308 config=True)
309
309
310 metadata = Dict({}, config=True,
310 metadata = Dict({}, config=True,
311 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
311 help="""Metadata dictionary, which serves as the default top-level metadata dict for each message.""")
312
312
313 # if 0, no adapting to do.
313 # if 0, no adapting to do.
314 adapt_version = Integer(0)
314 adapt_version = Integer(0)
315
315
316 # message signature related traits:
316 # message signature related traits:
317
317
318 key = CBytes(b'', config=True,
318 key = CBytes(config=True,
319 help="""execution key, for extra authentication.""")
319 help="""execution key, for signing messages.""")
320 def _key_default(self):
321 return str_to_bytes(str(uuid.uuid4()))
322
320 def _key_changed(self):
323 def _key_changed(self):
321 self._new_auth()
324 self._new_auth()
322
325
323 signature_scheme = Unicode('hmac-sha256', config=True,
326 signature_scheme = Unicode('hmac-sha256', config=True,
324 help="""The digest scheme used to construct the message signatures.
327 help="""The digest scheme used to construct the message signatures.
325 Must have the form 'hmac-HASH'.""")
328 Must have the form 'hmac-HASH'.""")
326 def _signature_scheme_changed(self, name, old, new):
329 def _signature_scheme_changed(self, name, old, new):
327 if not new.startswith('hmac-'):
330 if not new.startswith('hmac-'):
328 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
331 raise TraitError("signature_scheme must start with 'hmac-', got %r" % new)
329 hash_name = new.split('-', 1)[1]
332 hash_name = new.split('-', 1)[1]
330 try:
333 try:
331 self.digest_mod = getattr(hashlib, hash_name)
334 self.digest_mod = getattr(hashlib, hash_name)
332 except AttributeError:
335 except AttributeError:
333 raise TraitError("hashlib has no such attribute: %s" % hash_name)
336 raise TraitError("hashlib has no such attribute: %s" % hash_name)
334 self._new_auth()
337 self._new_auth()
335
338
336 digest_mod = Any()
339 digest_mod = Any()
337 def _digest_mod_default(self):
340 def _digest_mod_default(self):
338 return hashlib.sha256
341 return hashlib.sha256
339
342
340 auth = Instance(hmac.HMAC)
343 auth = Instance(hmac.HMAC)
341
344
342 def _new_auth(self):
345 def _new_auth(self):
343 if self.key:
346 if self.key:
344 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
347 self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
345 else:
348 else:
346 self.auth = None
349 self.auth = None
347
350
348 digest_history = Set()
351 digest_history = Set()
349 digest_history_size = Integer(2**16, config=True,
352 digest_history_size = Integer(2**16, config=True,
350 help="""The maximum number of digests to remember.
353 help="""The maximum number of digests to remember.
351
354
352 The digest history will be culled when it exceeds this value.
355 The digest history will be culled when it exceeds this value.
353 """
356 """
354 )
357 )
355
358
356 keyfile = Unicode('', config=True,
359 keyfile = Unicode('', config=True,
357 help="""path to file containing execution key.""")
360 help="""path to file containing execution key.""")
358 def _keyfile_changed(self, name, old, new):
361 def _keyfile_changed(self, name, old, new):
359 with open(new, 'rb') as f:
362 with open(new, 'rb') as f:
360 self.key = f.read().strip()
363 self.key = f.read().strip()
361
364
362 # for protecting against sends from forks
365 # for protecting against sends from forks
363 pid = Integer()
366 pid = Integer()
364
367
365 # serialization traits:
368 # serialization traits:
366
369
367 pack = Any(default_packer) # the actual packer function
370 pack = Any(default_packer) # the actual packer function
368 def _pack_changed(self, name, old, new):
371 def _pack_changed(self, name, old, new):
369 if not callable(new):
372 if not callable(new):
370 raise TypeError("packer must be callable, not %s"%type(new))
373 raise TypeError("packer must be callable, not %s"%type(new))
371
374
372 unpack = Any(default_unpacker) # the actual packer function
375 unpack = Any(default_unpacker) # the actual packer function
373 def _unpack_changed(self, name, old, new):
376 def _unpack_changed(self, name, old, new):
374 # unpacker is not checked - it is assumed to be
377 # unpacker is not checked - it is assumed to be
375 if not callable(new):
378 if not callable(new):
376 raise TypeError("unpacker must be callable, not %s"%type(new))
379 raise TypeError("unpacker must be callable, not %s"%type(new))
377
380
378 # thresholds:
381 # thresholds:
379 copy_threshold = Integer(2**16, config=True,
382 copy_threshold = Integer(2**16, config=True,
380 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
383 help="Threshold (in bytes) beyond which a buffer should be sent without copying.")
381 buffer_threshold = Integer(MAX_BYTES, config=True,
384 buffer_threshold = Integer(MAX_BYTES, config=True,
382 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
385 help="Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling.")
383 item_threshold = Integer(MAX_ITEMS, config=True,
386 item_threshold = Integer(MAX_ITEMS, config=True,
384 help="""The maximum number of items for a container to be introspected for custom serialization.
387 help="""The maximum number of items for a container to be introspected for custom serialization.
385 Containers larger than this are pickled outright.
388 Containers larger than this are pickled outright.
386 """
389 """
387 )
390 )
388
391
389
392
390 def __init__(self, **kwargs):
393 def __init__(self, **kwargs):
391 """create a Session object
394 """create a Session object
392
395
393 Parameters
396 Parameters
394 ----------
397 ----------
395
398
396 debug : bool
399 debug : bool
397 whether to trigger extra debugging statements
400 whether to trigger extra debugging statements
398 packer/unpacker : str : 'json', 'pickle' or import_string
401 packer/unpacker : str : 'json', 'pickle' or import_string
399 importstrings for methods to serialize message parts. If just
402 importstrings for methods to serialize message parts. If just
400 'json' or 'pickle', predefined JSON and pickle packers will be used.
403 'json' or 'pickle', predefined JSON and pickle packers will be used.
401 Otherwise, the entire importstring must be used.
404 Otherwise, the entire importstring must be used.
402
405
403 The functions must accept at least valid JSON input, and output
406 The functions must accept at least valid JSON input, and output
404 *bytes*.
407 *bytes*.
405
408
406 For example, to use msgpack:
409 For example, to use msgpack:
407 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
410 packer = 'msgpack.packb', unpacker='msgpack.unpackb'
408 pack/unpack : callables
411 pack/unpack : callables
409 You can also set the pack/unpack callables for serialization
412 You can also set the pack/unpack callables for serialization
410 directly.
413 directly.
411 session : unicode (must be ascii)
414 session : unicode (must be ascii)
412 the ID of this Session object. The default is to generate a new
415 the ID of this Session object. The default is to generate a new
413 UUID.
416 UUID.
414 bsession : bytes
417 bsession : bytes
415 The session as bytes
418 The session as bytes
416 username : unicode
419 username : unicode
417 username added to message headers. The default is to ask the OS.
420 username added to message headers. The default is to ask the OS.
418 key : bytes
421 key : bytes
419 The key used to initialize an HMAC signature. If unset, messages
422 The key used to initialize an HMAC signature. If unset, messages
420 will not be signed or checked.
423 will not be signed or checked.
421 signature_scheme : str
424 signature_scheme : str
422 The message digest scheme. Currently must be of the form 'hmac-HASH',
425 The message digest scheme. Currently must be of the form 'hmac-HASH',
423 where 'HASH' is a hashing function available in Python's hashlib.
426 where 'HASH' is a hashing function available in Python's hashlib.
424 The default is 'hmac-sha256'.
427 The default is 'hmac-sha256'.
425 This is ignored if 'key' is empty.
428 This is ignored if 'key' is empty.
426 keyfile : filepath
429 keyfile : filepath
427 The file containing a key. If this is set, `key` will be
430 The file containing a key. If this is set, `key` will be
428 initialized to the contents of the file.
431 initialized to the contents of the file.
429 """
432 """
430 super(Session, self).__init__(**kwargs)
433 super(Session, self).__init__(**kwargs)
431 self._check_packers()
434 self._check_packers()
432 self.none = self.pack({})
435 self.none = self.pack({})
433 # ensure self._session_default() if necessary, so bsession is defined:
436 # ensure self._session_default() if necessary, so bsession is defined:
434 self.session
437 self.session
435 self.pid = os.getpid()
438 self.pid = os.getpid()
439 self._new_auth()
436
440
437 @property
441 @property
438 def msg_id(self):
442 def msg_id(self):
439 """always return new uuid"""
443 """always return new uuid"""
440 return str(uuid.uuid4())
444 return str(uuid.uuid4())
441
445
442 def _check_packers(self):
446 def _check_packers(self):
443 """check packers for datetime support."""
447 """check packers for datetime support."""
444 pack = self.pack
448 pack = self.pack
445 unpack = self.unpack
449 unpack = self.unpack
446
450
447 # check simple serialization
451 # check simple serialization
448 msg = dict(a=[1,'hi'])
452 msg = dict(a=[1,'hi'])
449 try:
453 try:
450 packed = pack(msg)
454 packed = pack(msg)
451 except Exception as e:
455 except Exception as e:
452 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
456 msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
453 if self.packer == 'json':
457 if self.packer == 'json':
454 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
458 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
455 else:
459 else:
456 jsonmsg = ""
460 jsonmsg = ""
457 raise ValueError(
461 raise ValueError(
458 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
462 msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg)
459 )
463 )
460
464
461 # ensure packed message is bytes
465 # ensure packed message is bytes
462 if not isinstance(packed, bytes):
466 if not isinstance(packed, bytes):
463 raise ValueError("message packed to %r, but bytes are required"%type(packed))
467 raise ValueError("message packed to %r, but bytes are required"%type(packed))
464
468
465 # check that unpack is pack's inverse
469 # check that unpack is pack's inverse
466 try:
470 try:
467 unpacked = unpack(packed)
471 unpacked = unpack(packed)
468 assert unpacked == msg
472 assert unpacked == msg
469 except Exception as e:
473 except Exception as e:
470 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
474 msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
471 if self.packer == 'json':
475 if self.packer == 'json':
472 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
476 jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
473 else:
477 else:
474 jsonmsg = ""
478 jsonmsg = ""
475 raise ValueError(
479 raise ValueError(
476 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
480 msg.format(packer=self.packer, unpacker=self.unpacker, e=e, jsonmsg=jsonmsg)
477 )
481 )
478
482
479 # check datetime support
483 # check datetime support
480 msg = dict(t=datetime.now())
484 msg = dict(t=datetime.now())
481 try:
485 try:
482 unpacked = unpack(pack(msg))
486 unpacked = unpack(pack(msg))
483 if isinstance(unpacked['t'], datetime):
487 if isinstance(unpacked['t'], datetime):
484 raise ValueError("Shouldn't deserialize to datetime")
488 raise ValueError("Shouldn't deserialize to datetime")
485 except Exception:
489 except Exception:
486 self.pack = lambda o: pack(squash_dates(o))
490 self.pack = lambda o: pack(squash_dates(o))
487 self.unpack = lambda s: unpack(s)
491 self.unpack = lambda s: unpack(s)
488
492
489 def msg_header(self, msg_type):
493 def msg_header(self, msg_type):
490 return msg_header(self.msg_id, msg_type, self.username, self.session)
494 return msg_header(self.msg_id, msg_type, self.username, self.session)
491
495
492 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
496 def msg(self, msg_type, content=None, parent=None, header=None, metadata=None):
493 """Return the nested message dict.
497 """Return the nested message dict.
494
498
495 This format is different from what is sent over the wire. The
499 This format is different from what is sent over the wire. The
496 serialize/deserialize methods converts this nested message dict to the wire
500 serialize/deserialize methods converts this nested message dict to the wire
497 format, which is a list of message parts.
501 format, which is a list of message parts.
498 """
502 """
499 msg = {}
503 msg = {}
500 header = self.msg_header(msg_type) if header is None else header
504 header = self.msg_header(msg_type) if header is None else header
501 msg['header'] = header
505 msg['header'] = header
502 msg['msg_id'] = header['msg_id']
506 msg['msg_id'] = header['msg_id']
503 msg['msg_type'] = header['msg_type']
507 msg['msg_type'] = header['msg_type']
504 msg['parent_header'] = {} if parent is None else extract_header(parent)
508 msg['parent_header'] = {} if parent is None else extract_header(parent)
505 msg['content'] = {} if content is None else content
509 msg['content'] = {} if content is None else content
506 msg['metadata'] = self.metadata.copy()
510 msg['metadata'] = self.metadata.copy()
507 if metadata is not None:
511 if metadata is not None:
508 msg['metadata'].update(metadata)
512 msg['metadata'].update(metadata)
509 return msg
513 return msg
510
514
511 def sign(self, msg_list):
515 def sign(self, msg_list):
512 """Sign a message with HMAC digest. If no auth, return b''.
516 """Sign a message with HMAC digest. If no auth, return b''.
513
517
514 Parameters
518 Parameters
515 ----------
519 ----------
516 msg_list : list
520 msg_list : list
517 The [p_header,p_parent,p_content] part of the message list.
521 The [p_header,p_parent,p_content] part of the message list.
518 """
522 """
519 if self.auth is None:
523 if self.auth is None:
520 return b''
524 return b''
521 h = self.auth.copy()
525 h = self.auth.copy()
522 for m in msg_list:
526 for m in msg_list:
523 h.update(m)
527 h.update(m)
524 return str_to_bytes(h.hexdigest())
528 return str_to_bytes(h.hexdigest())
525
529
526 def serialize(self, msg, ident=None):
530 def serialize(self, msg, ident=None):
527 """Serialize the message components to bytes.
531 """Serialize the message components to bytes.
528
532
529 This is roughly the inverse of deserialize. The serialize/deserialize
533 This is roughly the inverse of deserialize. The serialize/deserialize
530 methods work with full message lists, whereas pack/unpack work with
534 methods work with full message lists, whereas pack/unpack work with
531 the individual message parts in the message list.
535 the individual message parts in the message list.
532
536
533 Parameters
537 Parameters
534 ----------
538 ----------
535 msg : dict or Message
539 msg : dict or Message
536 The next message dict as returned by the self.msg method.
540 The next message dict as returned by the self.msg method.
537
541
538 Returns
542 Returns
539 -------
543 -------
540 msg_list : list
544 msg_list : list
541 The list of bytes objects to be sent with the format::
545 The list of bytes objects to be sent with the format::
542
546
543 [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
547 [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
544 p_metadata, p_content, buffer1, buffer2, ...]
548 p_metadata, p_content, buffer1, buffer2, ...]
545
549
546 In this list, the ``p_*`` entities are the packed or serialized
550 In this list, the ``p_*`` entities are the packed or serialized
547 versions, so if JSON is used, these are utf8 encoded JSON strings.
551 versions, so if JSON is used, these are utf8 encoded JSON strings.
548 """
552 """
549 content = msg.get('content', {})
553 content = msg.get('content', {})
550 if content is None:
554 if content is None:
551 content = self.none
555 content = self.none
552 elif isinstance(content, dict):
556 elif isinstance(content, dict):
553 content = self.pack(content)
557 content = self.pack(content)
554 elif isinstance(content, bytes):
558 elif isinstance(content, bytes):
555 # content is already packed, as in a relayed message
559 # content is already packed, as in a relayed message
556 pass
560 pass
557 elif isinstance(content, unicode_type):
561 elif isinstance(content, unicode_type):
558 # should be bytes, but JSON often spits out unicode
562 # should be bytes, but JSON often spits out unicode
559 content = content.encode('utf8')
563 content = content.encode('utf8')
560 else:
564 else:
561 raise TypeError("Content incorrect type: %s"%type(content))
565 raise TypeError("Content incorrect type: %s"%type(content))
562
566
563 real_message = [self.pack(msg['header']),
567 real_message = [self.pack(msg['header']),
564 self.pack(msg['parent_header']),
568 self.pack(msg['parent_header']),
565 self.pack(msg['metadata']),
569 self.pack(msg['metadata']),
566 content,
570 content,
567 ]
571 ]
568
572
569 to_send = []
573 to_send = []
570
574
571 if isinstance(ident, list):
575 if isinstance(ident, list):
572 # accept list of idents
576 # accept list of idents
573 to_send.extend(ident)
577 to_send.extend(ident)
574 elif ident is not None:
578 elif ident is not None:
575 to_send.append(ident)
579 to_send.append(ident)
576 to_send.append(DELIM)
580 to_send.append(DELIM)
577
581
578 signature = self.sign(real_message)
582 signature = self.sign(real_message)
579 to_send.append(signature)
583 to_send.append(signature)
580
584
581 to_send.extend(real_message)
585 to_send.extend(real_message)
582
586
583 return to_send
587 return to_send
584
588
585 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
589 def send(self, stream, msg_or_type, content=None, parent=None, ident=None,
586 buffers=None, track=False, header=None, metadata=None):
590 buffers=None, track=False, header=None, metadata=None):
587 """Build and send a message via stream or socket.
591 """Build and send a message via stream or socket.
588
592
589 The message format used by this function internally is as follows:
593 The message format used by this function internally is as follows:
590
594
591 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
595 [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
592 buffer1,buffer2,...]
596 buffer1,buffer2,...]
593
597
594 The serialize/deserialize methods convert the nested message dict into this
598 The serialize/deserialize methods convert the nested message dict into this
595 format.
599 format.
596
600
597 Parameters
601 Parameters
598 ----------
602 ----------
599
603
600 stream : zmq.Socket or ZMQStream
604 stream : zmq.Socket or ZMQStream
601 The socket-like object used to send the data.
605 The socket-like object used to send the data.
602 msg_or_type : str or Message/dict
606 msg_or_type : str or Message/dict
603 Normally, msg_or_type will be a msg_type unless a message is being
607 Normally, msg_or_type will be a msg_type unless a message is being
604 sent more than once. If a header is supplied, this can be set to
608 sent more than once. If a header is supplied, this can be set to
605 None and the msg_type will be pulled from the header.
609 None and the msg_type will be pulled from the header.
606
610
607 content : dict or None
611 content : dict or None
608 The content of the message (ignored if msg_or_type is a message).
612 The content of the message (ignored if msg_or_type is a message).
609 header : dict or None
613 header : dict or None
610 The header dict for the message (ignored if msg_to_type is a message).
614 The header dict for the message (ignored if msg_to_type is a message).
611 parent : Message or dict or None
615 parent : Message or dict or None
612 The parent or parent header describing the parent of this message
616 The parent or parent header describing the parent of this message
613 (ignored if msg_or_type is a message).
617 (ignored if msg_or_type is a message).
614 ident : bytes or list of bytes
618 ident : bytes or list of bytes
615 The zmq.IDENTITY routing path.
619 The zmq.IDENTITY routing path.
616 metadata : dict or None
620 metadata : dict or None
617 The metadata describing the message
621 The metadata describing the message
618 buffers : list or None
622 buffers : list or None
619 The already-serialized buffers to be appended to the message.
623 The already-serialized buffers to be appended to the message.
620 track : bool
624 track : bool
621 Whether to track. Only for use with Sockets, because ZMQStream
625 Whether to track. Only for use with Sockets, because ZMQStream
622 objects cannot track messages.
626 objects cannot track messages.
623
627
624
628
625 Returns
629 Returns
626 -------
630 -------
627 msg : dict
631 msg : dict
628 The constructed message.
632 The constructed message.
629 """
633 """
630 if not isinstance(stream, zmq.Socket):
634 if not isinstance(stream, zmq.Socket):
631 # ZMQStreams and dummy sockets do not support tracking.
635 # ZMQStreams and dummy sockets do not support tracking.
632 track = False
636 track = False
633
637
634 if isinstance(msg_or_type, (Message, dict)):
638 if isinstance(msg_or_type, (Message, dict)):
635 # We got a Message or message dict, not a msg_type so don't
639 # We got a Message or message dict, not a msg_type so don't
636 # build a new Message.
640 # build a new Message.
637 msg = msg_or_type
641 msg = msg_or_type
638 buffers = buffers or msg.get('buffers', [])
642 buffers = buffers or msg.get('buffers', [])
639 else:
643 else:
640 msg = self.msg(msg_or_type, content=content, parent=parent,
644 msg = self.msg(msg_or_type, content=content, parent=parent,
641 header=header, metadata=metadata)
645 header=header, metadata=metadata)
642 if not os.getpid() == self.pid:
646 if not os.getpid() == self.pid:
643 io.rprint("WARNING: attempted to send message from fork")
647 io.rprint("WARNING: attempted to send message from fork")
644 io.rprint(msg)
648 io.rprint(msg)
645 return
649 return
646 buffers = [] if buffers is None else buffers
650 buffers = [] if buffers is None else buffers
647 if self.adapt_version:
651 if self.adapt_version:
648 msg = adapt(msg, self.adapt_version)
652 msg = adapt(msg, self.adapt_version)
649 to_send = self.serialize(msg, ident)
653 to_send = self.serialize(msg, ident)
650 to_send.extend(buffers)
654 to_send.extend(buffers)
651 longest = max([ len(s) for s in to_send ])
655 longest = max([ len(s) for s in to_send ])
652 copy = (longest < self.copy_threshold)
656 copy = (longest < self.copy_threshold)
653
657
654 if buffers and track and not copy:
658 if buffers and track and not copy:
655 # only really track when we are doing zero-copy buffers
659 # only really track when we are doing zero-copy buffers
656 tracker = stream.send_multipart(to_send, copy=False, track=True)
660 tracker = stream.send_multipart(to_send, copy=False, track=True)
657 else:
661 else:
658 # use dummy tracker, which will be done immediately
662 # use dummy tracker, which will be done immediately
659 tracker = DONE
663 tracker = DONE
660 stream.send_multipart(to_send, copy=copy)
664 stream.send_multipart(to_send, copy=copy)
661
665
662 if self.debug:
666 if self.debug:
663 pprint.pprint(msg)
667 pprint.pprint(msg)
664 pprint.pprint(to_send)
668 pprint.pprint(to_send)
665 pprint.pprint(buffers)
669 pprint.pprint(buffers)
666
670
667 msg['tracker'] = tracker
671 msg['tracker'] = tracker
668
672
669 return msg
673 return msg
670
674
671 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
675 def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
672 """Send a raw message via ident path.
676 """Send a raw message via ident path.
673
677
674 This method is used to send a already serialized message.
678 This method is used to send a already serialized message.
675
679
676 Parameters
680 Parameters
677 ----------
681 ----------
678 stream : ZMQStream or Socket
682 stream : ZMQStream or Socket
679 The ZMQ stream or socket to use for sending the message.
683 The ZMQ stream or socket to use for sending the message.
680 msg_list : list
684 msg_list : list
681 The serialized list of messages to send. This only includes the
685 The serialized list of messages to send. This only includes the
682 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
686 [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...] portion of
683 the message.
687 the message.
684 ident : ident or list
688 ident : ident or list
685 A single ident or a list of idents to use in sending.
689 A single ident or a list of idents to use in sending.
686 """
690 """
687 to_send = []
691 to_send = []
688 if isinstance(ident, bytes):
692 if isinstance(ident, bytes):
689 ident = [ident]
693 ident = [ident]
690 if ident is not None:
694 if ident is not None:
691 to_send.extend(ident)
695 to_send.extend(ident)
692
696
693 to_send.append(DELIM)
697 to_send.append(DELIM)
694 to_send.append(self.sign(msg_list))
698 to_send.append(self.sign(msg_list))
695 to_send.extend(msg_list)
699 to_send.extend(msg_list)
696 stream.send_multipart(to_send, flags, copy=copy)
700 stream.send_multipart(to_send, flags, copy=copy)
697
701
698 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
702 def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
699 """Receive and unpack a message.
703 """Receive and unpack a message.
700
704
701 Parameters
705 Parameters
702 ----------
706 ----------
703 socket : ZMQStream or Socket
707 socket : ZMQStream or Socket
704 The socket or stream to use in receiving.
708 The socket or stream to use in receiving.
705
709
706 Returns
710 Returns
707 -------
711 -------
708 [idents], msg
712 [idents], msg
709 [idents] is a list of idents and msg is a nested message dict of
713 [idents] is a list of idents and msg is a nested message dict of
710 same format as self.msg returns.
714 same format as self.msg returns.
711 """
715 """
712 if isinstance(socket, ZMQStream):
716 if isinstance(socket, ZMQStream):
713 socket = socket.socket
717 socket = socket.socket
714 try:
718 try:
715 msg_list = socket.recv_multipart(mode, copy=copy)
719 msg_list = socket.recv_multipart(mode, copy=copy)
716 except zmq.ZMQError as e:
720 except zmq.ZMQError as e:
717 if e.errno == zmq.EAGAIN:
721 if e.errno == zmq.EAGAIN:
718 # We can convert EAGAIN to None as we know in this case
722 # We can convert EAGAIN to None as we know in this case
719 # recv_multipart won't return None.
723 # recv_multipart won't return None.
720 return None,None
724 return None,None
721 else:
725 else:
722 raise
726 raise
723 # split multipart message into identity list and message dict
727 # split multipart message into identity list and message dict
724 # invalid large messages can cause very expensive string comparisons
728 # invalid large messages can cause very expensive string comparisons
725 idents, msg_list = self.feed_identities(msg_list, copy)
729 idents, msg_list = self.feed_identities(msg_list, copy)
726 try:
730 try:
727 return idents, self.deserialize(msg_list, content=content, copy=copy)
731 return idents, self.deserialize(msg_list, content=content, copy=copy)
728 except Exception as e:
732 except Exception as e:
729 # TODO: handle it
733 # TODO: handle it
730 raise e
734 raise e
731
735
732 def feed_identities(self, msg_list, copy=True):
736 def feed_identities(self, msg_list, copy=True):
733 """Split the identities from the rest of the message.
737 """Split the identities from the rest of the message.
734
738
735 Feed until DELIM is reached, then return the prefix as idents and
739 Feed until DELIM is reached, then return the prefix as idents and
736 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
740 remainder as msg_list. This is easily broken by setting an IDENT to DELIM,
737 but that would be silly.
741 but that would be silly.
738
742
739 Parameters
743 Parameters
740 ----------
744 ----------
741 msg_list : a list of Message or bytes objects
745 msg_list : a list of Message or bytes objects
742 The message to be split.
746 The message to be split.
743 copy : bool
747 copy : bool
744 flag determining whether the arguments are bytes or Messages
748 flag determining whether the arguments are bytes or Messages
745
749
746 Returns
750 Returns
747 -------
751 -------
748 (idents, msg_list) : two lists
752 (idents, msg_list) : two lists
749 idents will always be a list of bytes, each of which is a ZMQ
753 idents will always be a list of bytes, each of which is a ZMQ
750 identity. msg_list will be a list of bytes or zmq.Messages of the
754 identity. msg_list will be a list of bytes or zmq.Messages of the
751 form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
755 form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
752 should be unpackable/unserializable via self.deserialize at this
756 should be unpackable/unserializable via self.deserialize at this
753 point.
757 point.
754 """
758 """
755 if copy:
759 if copy:
756 idx = msg_list.index(DELIM)
760 idx = msg_list.index(DELIM)
757 return msg_list[:idx], msg_list[idx+1:]
761 return msg_list[:idx], msg_list[idx+1:]
758 else:
762 else:
759 failed = True
763 failed = True
760 for idx,m in enumerate(msg_list):
764 for idx,m in enumerate(msg_list):
761 if m.bytes == DELIM:
765 if m.bytes == DELIM:
762 failed = False
766 failed = False
763 break
767 break
764 if failed:
768 if failed:
765 raise ValueError("DELIM not in msg_list")
769 raise ValueError("DELIM not in msg_list")
766 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
770 idents, msg_list = msg_list[:idx], msg_list[idx+1:]
767 return [m.bytes for m in idents], msg_list
771 return [m.bytes for m in idents], msg_list
768
772
769 def _add_digest(self, signature):
773 def _add_digest(self, signature):
770 """add a digest to history to protect against replay attacks"""
774 """add a digest to history to protect against replay attacks"""
771 if self.digest_history_size == 0:
775 if self.digest_history_size == 0:
772 # no history, never add digests
776 # no history, never add digests
773 return
777 return
774
778
775 self.digest_history.add(signature)
779 self.digest_history.add(signature)
776 if len(self.digest_history) > self.digest_history_size:
780 if len(self.digest_history) > self.digest_history_size:
777 # threshold reached, cull 10%
781 # threshold reached, cull 10%
778 self._cull_digest_history()
782 self._cull_digest_history()
779
783
780 def _cull_digest_history(self):
784 def _cull_digest_history(self):
781 """cull the digest history
785 """cull the digest history
782
786
783 Removes a randomly selected 10% of the digest history
787 Removes a randomly selected 10% of the digest history
784 """
788 """
785 current = len(self.digest_history)
789 current = len(self.digest_history)
786 n_to_cull = max(int(current // 10), current - self.digest_history_size)
790 n_to_cull = max(int(current // 10), current - self.digest_history_size)
787 if n_to_cull >= current:
791 if n_to_cull >= current:
788 self.digest_history = set()
792 self.digest_history = set()
789 return
793 return
790 to_cull = random.sample(self.digest_history, n_to_cull)
794 to_cull = random.sample(self.digest_history, n_to_cull)
791 self.digest_history.difference_update(to_cull)
795 self.digest_history.difference_update(to_cull)
792
796
793 def deserialize(self, msg_list, content=True, copy=True):
797 def deserialize(self, msg_list, content=True, copy=True):
794 """Unserialize a msg_list to a nested message dict.
798 """Unserialize a msg_list to a nested message dict.
795
799
796 This is roughly the inverse of serialize. The serialize/deserialize
800 This is roughly the inverse of serialize. The serialize/deserialize
797 methods work with full message lists, whereas pack/unpack work with
801 methods work with full message lists, whereas pack/unpack work with
798 the individual message parts in the message list.
802 the individual message parts in the message list.
799
803
800 Parameters
804 Parameters
801 ----------
805 ----------
802 msg_list : list of bytes or Message objects
806 msg_list : list of bytes or Message objects
803 The list of message parts of the form [HMAC,p_header,p_parent,
807 The list of message parts of the form [HMAC,p_header,p_parent,
804 p_metadata,p_content,buffer1,buffer2,...].
808 p_metadata,p_content,buffer1,buffer2,...].
805 content : bool (True)
809 content : bool (True)
806 Whether to unpack the content dict (True), or leave it packed
810 Whether to unpack the content dict (True), or leave it packed
807 (False).
811 (False).
808 copy : bool (True)
812 copy : bool (True)
809 Whether to return the bytes (True), or the non-copying Message
813 Whether to return the bytes (True), or the non-copying Message
810 object in each place (False).
814 object in each place (False).
811
815
812 Returns
816 Returns
813 -------
817 -------
814 msg : dict
818 msg : dict
815 The nested message dict with top-level keys [header, parent_header,
819 The nested message dict with top-level keys [header, parent_header,
816 content, buffers].
820 content, buffers].
817 """
821 """
818 minlen = 5
822 minlen = 5
819 message = {}
823 message = {}
820 if not copy:
824 if not copy:
821 for i in range(minlen):
825 for i in range(minlen):
822 msg_list[i] = msg_list[i].bytes
826 msg_list[i] = msg_list[i].bytes
823 if self.auth is not None:
827 if self.auth is not None:
824 signature = msg_list[0]
828 signature = msg_list[0]
825 if not signature:
829 if not signature:
826 raise ValueError("Unsigned Message")
830 raise ValueError("Unsigned Message")
827 if signature in self.digest_history:
831 if signature in self.digest_history:
828 raise ValueError("Duplicate Signature: %r" % signature)
832 raise ValueError("Duplicate Signature: %r" % signature)
829 self._add_digest(signature)
833 self._add_digest(signature)
830 check = self.sign(msg_list[1:5])
834 check = self.sign(msg_list[1:5])
831 if not compare_digest(signature, check):
835 if not compare_digest(signature, check):
832 raise ValueError("Invalid Signature: %r" % signature)
836 raise ValueError("Invalid Signature: %r" % signature)
833 if not len(msg_list) >= minlen:
837 if not len(msg_list) >= minlen:
834 raise TypeError("malformed message, must have at least %i elements"%minlen)
838 raise TypeError("malformed message, must have at least %i elements"%minlen)
835 header = self.unpack(msg_list[1])
839 header = self.unpack(msg_list[1])
836 message['header'] = extract_dates(header)
840 message['header'] = extract_dates(header)
837 message['msg_id'] = header['msg_id']
841 message['msg_id'] = header['msg_id']
838 message['msg_type'] = header['msg_type']
842 message['msg_type'] = header['msg_type']
839 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
843 message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
840 message['metadata'] = self.unpack(msg_list[3])
844 message['metadata'] = self.unpack(msg_list[3])
841 if content:
845 if content:
842 message['content'] = self.unpack(msg_list[4])
846 message['content'] = self.unpack(msg_list[4])
843 else:
847 else:
844 message['content'] = msg_list[4]
848 message['content'] = msg_list[4]
845
849
846 message['buffers'] = msg_list[5:]
850 message['buffers'] = msg_list[5:]
847 # adapt to the current version
851 # adapt to the current version
848 return adapt(message)
852 return adapt(message)
849
853
850 def unserialize(self, *args, **kwargs):
854 def unserialize(self, *args, **kwargs):
851 warnings.warn(
855 warnings.warn(
852 "Session.unserialize is deprecated. Use Session.deserialize.",
856 "Session.unserialize is deprecated. Use Session.deserialize.",
853 DeprecationWarning,
857 DeprecationWarning,
854 )
858 )
855 return self.deserialize(*args, **kwargs)
859 return self.deserialize(*args, **kwargs)
856
860
857
861
858 def test_msg2obj():
862 def test_msg2obj():
859 am = dict(x=1)
863 am = dict(x=1)
860 ao = Message(am)
864 ao = Message(am)
861 assert ao.x == am['x']
865 assert ao.x == am['x']
862
866
863 am['y'] = dict(z=1)
867 am['y'] = dict(z=1)
864 ao = Message(am)
868 ao = Message(am)
865 assert ao.y.z == am['y']['z']
869 assert ao.y.z == am['y']['z']
866
870
867 k1, k2 = 'y', 'z'
871 k1, k2 = 'y', 'z'
868 assert ao[k1][k2] == am[k1][k2]
872 assert ao[k1][k2] == am[k1][k2]
869
873
870 am2 = dict(ao)
874 am2 = dict(ao)
871 assert am['x'] == am2['x']
875 assert am['x'] == am2['x']
872 assert am['y']['z'] == am2['y']['z']
876 assert am['y']['z'] == am2['y']['z']
873
877
@@ -1,318 +1,318 b''
1 """test building messages with streamsession"""
1 """test building messages with Session"""
2
2
3 #-------------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Copyright (C) 2011 The IPython Development Team
4 # Distributed under the terms of the Modified BSD License.
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-------------------------------------------------------------------------------
9
10 #-------------------------------------------------------------------------------
11 # Imports
12 #-------------------------------------------------------------------------------
13
5
6 import hmac
14 import os
7 import os
15 import uuid
8 import uuid
16 from datetime import datetime
9 from datetime import datetime
17
10
18 import zmq
11 import zmq
19
12
20 from zmq.tests import BaseZMQTestCase
13 from zmq.tests import BaseZMQTestCase
21 from zmq.eventloop.zmqstream import ZMQStream
14 from zmq.eventloop.zmqstream import ZMQStream
22
15
23 from IPython.kernel.zmq import session as ss
16 from IPython.kernel.zmq import session as ss
24
17
25 from IPython.testing.decorators import skipif, module_not_available
18 from IPython.testing.decorators import skipif, module_not_available
26 from IPython.utils.py3compat import string_types
19 from IPython.utils.py3compat import string_types
27 from IPython.utils import jsonutil
20 from IPython.utils import jsonutil
28
21
29 def _bad_packer(obj):
22 def _bad_packer(obj):
30 raise TypeError("I don't work")
23 raise TypeError("I don't work")
31
24
32 def _bad_unpacker(bytes):
25 def _bad_unpacker(bytes):
33 raise TypeError("I don't work either")
26 raise TypeError("I don't work either")
34
27
35 class SessionTestCase(BaseZMQTestCase):
28 class SessionTestCase(BaseZMQTestCase):
36
29
37 def setUp(self):
30 def setUp(self):
38 BaseZMQTestCase.setUp(self)
31 BaseZMQTestCase.setUp(self)
39 self.session = ss.Session()
32 self.session = ss.Session()
40
33
41
34
42 class TestSession(SessionTestCase):
35 class TestSession(SessionTestCase):
43
36
44 def test_msg(self):
37 def test_msg(self):
45 """message format"""
38 """message format"""
46 msg = self.session.msg('execute')
39 msg = self.session.msg('execute')
47 thekeys = set('header parent_header metadata content msg_type msg_id'.split())
40 thekeys = set('header parent_header metadata content msg_type msg_id'.split())
48 s = set(msg.keys())
41 s = set(msg.keys())
49 self.assertEqual(s, thekeys)
42 self.assertEqual(s, thekeys)
50 self.assertTrue(isinstance(msg['content'],dict))
43 self.assertTrue(isinstance(msg['content'],dict))
51 self.assertTrue(isinstance(msg['metadata'],dict))
44 self.assertTrue(isinstance(msg['metadata'],dict))
52 self.assertTrue(isinstance(msg['header'],dict))
45 self.assertTrue(isinstance(msg['header'],dict))
53 self.assertTrue(isinstance(msg['parent_header'],dict))
46 self.assertTrue(isinstance(msg['parent_header'],dict))
54 self.assertTrue(isinstance(msg['msg_id'],str))
47 self.assertTrue(isinstance(msg['msg_id'],str))
55 self.assertTrue(isinstance(msg['msg_type'],str))
48 self.assertTrue(isinstance(msg['msg_type'],str))
56 self.assertEqual(msg['header']['msg_type'], 'execute')
49 self.assertEqual(msg['header']['msg_type'], 'execute')
57 self.assertEqual(msg['msg_type'], 'execute')
50 self.assertEqual(msg['msg_type'], 'execute')
58
51
59 def test_serialize(self):
52 def test_serialize(self):
60 msg = self.session.msg('execute', content=dict(a=10, b=1.1))
53 msg = self.session.msg('execute', content=dict(a=10, b=1.1))
61 msg_list = self.session.serialize(msg, ident=b'foo')
54 msg_list = self.session.serialize(msg, ident=b'foo')
62 ident, msg_list = self.session.feed_identities(msg_list)
55 ident, msg_list = self.session.feed_identities(msg_list)
63 new_msg = self.session.deserialize(msg_list)
56 new_msg = self.session.deserialize(msg_list)
64 self.assertEqual(ident[0], b'foo')
57 self.assertEqual(ident[0], b'foo')
65 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
58 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
66 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
59 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
67 self.assertEqual(new_msg['header'],msg['header'])
60 self.assertEqual(new_msg['header'],msg['header'])
68 self.assertEqual(new_msg['content'],msg['content'])
61 self.assertEqual(new_msg['content'],msg['content'])
69 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
62 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
70 self.assertEqual(new_msg['metadata'],msg['metadata'])
63 self.assertEqual(new_msg['metadata'],msg['metadata'])
71 # ensure floats don't come out as Decimal:
64 # ensure floats don't come out as Decimal:
72 self.assertEqual(type(new_msg['content']['b']),type(new_msg['content']['b']))
65 self.assertEqual(type(new_msg['content']['b']),type(new_msg['content']['b']))
73
66
67 def test_default_secure(self):
68 self.assertIsInstance(self.session.key, bytes)
69 self.assertIsInstance(self.session.auth, hmac.HMAC)
70
74 def test_send(self):
71 def test_send(self):
75 ctx = zmq.Context.instance()
72 ctx = zmq.Context.instance()
76 A = ctx.socket(zmq.PAIR)
73 A = ctx.socket(zmq.PAIR)
77 B = ctx.socket(zmq.PAIR)
74 B = ctx.socket(zmq.PAIR)
78 A.bind("inproc://test")
75 A.bind("inproc://test")
79 B.connect("inproc://test")
76 B.connect("inproc://test")
80
77
81 msg = self.session.msg('execute', content=dict(a=10))
78 msg = self.session.msg('execute', content=dict(a=10))
82 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
79 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
83
80
84 ident, msg_list = self.session.feed_identities(B.recv_multipart())
81 ident, msg_list = self.session.feed_identities(B.recv_multipart())
85 new_msg = self.session.deserialize(msg_list)
82 new_msg = self.session.deserialize(msg_list)
86 self.assertEqual(ident[0], b'foo')
83 self.assertEqual(ident[0], b'foo')
87 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
84 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
88 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
85 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
89 self.assertEqual(new_msg['header'],msg['header'])
86 self.assertEqual(new_msg['header'],msg['header'])
90 self.assertEqual(new_msg['content'],msg['content'])
87 self.assertEqual(new_msg['content'],msg['content'])
91 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
88 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
92 self.assertEqual(new_msg['metadata'],msg['metadata'])
89 self.assertEqual(new_msg['metadata'],msg['metadata'])
93 self.assertEqual(new_msg['buffers'],[b'bar'])
90 self.assertEqual(new_msg['buffers'],[b'bar'])
94
91
95 content = msg['content']
92 content = msg['content']
96 header = msg['header']
93 header = msg['header']
94 header['date'] = datetime.now()
97 parent = msg['parent_header']
95 parent = msg['parent_header']
98 metadata = msg['metadata']
96 metadata = msg['metadata']
99 msg_type = header['msg_type']
97 msg_type = header['msg_type']
100 self.session.send(A, None, content=content, parent=parent,
98 self.session.send(A, None, content=content, parent=parent,
101 header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
99 header=header, metadata=metadata, ident=b'foo', buffers=[b'bar'])
102 ident, msg_list = self.session.feed_identities(B.recv_multipart())
100 ident, msg_list = self.session.feed_identities(B.recv_multipart())
103 new_msg = self.session.deserialize(msg_list)
101 new_msg = self.session.deserialize(msg_list)
104 self.assertEqual(ident[0], b'foo')
102 self.assertEqual(ident[0], b'foo')
105 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
103 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
106 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
104 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
107 self.assertEqual(new_msg['header'],msg['header'])
105 self.assertEqual(new_msg['header'],msg['header'])
108 self.assertEqual(new_msg['content'],msg['content'])
106 self.assertEqual(new_msg['content'],msg['content'])
109 self.assertEqual(new_msg['metadata'],msg['metadata'])
107 self.assertEqual(new_msg['metadata'],msg['metadata'])
110 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
108 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
111 self.assertEqual(new_msg['buffers'],[b'bar'])
109 self.assertEqual(new_msg['buffers'],[b'bar'])
112
110
111 header['date'] = datetime.now()
112
113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
113 self.session.send(A, msg, ident=b'foo', buffers=[b'bar'])
114 ident, new_msg = self.session.recv(B)
114 ident, new_msg = self.session.recv(B)
115 self.assertEqual(ident[0], b'foo')
115 self.assertEqual(ident[0], b'foo')
116 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
116 self.assertEqual(new_msg['msg_id'],msg['msg_id'])
117 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
117 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
118 self.assertEqual(new_msg['header'],msg['header'])
118 self.assertEqual(new_msg['header'],msg['header'])
119 self.assertEqual(new_msg['content'],msg['content'])
119 self.assertEqual(new_msg['content'],msg['content'])
120 self.assertEqual(new_msg['metadata'],msg['metadata'])
120 self.assertEqual(new_msg['metadata'],msg['metadata'])
121 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
121 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
122 self.assertEqual(new_msg['buffers'],[b'bar'])
122 self.assertEqual(new_msg['buffers'],[b'bar'])
123
123
124 A.close()
124 A.close()
125 B.close()
125 B.close()
126 ctx.term()
126 ctx.term()
127
127
128 def test_args(self):
128 def test_args(self):
129 """initialization arguments for Session"""
129 """initialization arguments for Session"""
130 s = self.session
130 s = self.session
131 self.assertTrue(s.pack is ss.default_packer)
131 self.assertTrue(s.pack is ss.default_packer)
132 self.assertTrue(s.unpack is ss.default_unpacker)
132 self.assertTrue(s.unpack is ss.default_unpacker)
133 self.assertEqual(s.username, os.environ.get('USER', u'username'))
133 self.assertEqual(s.username, os.environ.get('USER', u'username'))
134
134
135 s = ss.Session()
135 s = ss.Session()
136 self.assertEqual(s.username, os.environ.get('USER', u'username'))
136 self.assertEqual(s.username, os.environ.get('USER', u'username'))
137
137
138 self.assertRaises(TypeError, ss.Session, pack='hi')
138 self.assertRaises(TypeError, ss.Session, pack='hi')
139 self.assertRaises(TypeError, ss.Session, unpack='hi')
139 self.assertRaises(TypeError, ss.Session, unpack='hi')
140 u = str(uuid.uuid4())
140 u = str(uuid.uuid4())
141 s = ss.Session(username=u'carrot', session=u)
141 s = ss.Session(username=u'carrot', session=u)
142 self.assertEqual(s.session, u)
142 self.assertEqual(s.session, u)
143 self.assertEqual(s.username, u'carrot')
143 self.assertEqual(s.username, u'carrot')
144
144
145 def test_tracking(self):
145 def test_tracking(self):
146 """test tracking messages"""
146 """test tracking messages"""
147 a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
147 a,b = self.create_bound_pair(zmq.PAIR, zmq.PAIR)
148 s = self.session
148 s = self.session
149 s.copy_threshold = 1
149 s.copy_threshold = 1
150 stream = ZMQStream(a)
150 stream = ZMQStream(a)
151 msg = s.send(a, 'hello', track=False)
151 msg = s.send(a, 'hello', track=False)
152 self.assertTrue(msg['tracker'] is ss.DONE)
152 self.assertTrue(msg['tracker'] is ss.DONE)
153 msg = s.send(a, 'hello', track=True)
153 msg = s.send(a, 'hello', track=True)
154 self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
154 self.assertTrue(isinstance(msg['tracker'], zmq.MessageTracker))
155 M = zmq.Message(b'hi there', track=True)
155 M = zmq.Message(b'hi there', track=True)
156 msg = s.send(a, 'hello', buffers=[M], track=True)
156 msg = s.send(a, 'hello', buffers=[M], track=True)
157 t = msg['tracker']
157 t = msg['tracker']
158 self.assertTrue(isinstance(t, zmq.MessageTracker))
158 self.assertTrue(isinstance(t, zmq.MessageTracker))
159 self.assertRaises(zmq.NotDone, t.wait, .1)
159 self.assertRaises(zmq.NotDone, t.wait, .1)
160 del M
160 del M
161 t.wait(1) # this will raise
161 t.wait(1) # this will raise
162
162
163
163
164 def test_unique_msg_ids(self):
164 def test_unique_msg_ids(self):
165 """test that messages receive unique ids"""
165 """test that messages receive unique ids"""
166 ids = set()
166 ids = set()
167 for i in range(2**12):
167 for i in range(2**12):
168 h = self.session.msg_header('test')
168 h = self.session.msg_header('test')
169 msg_id = h['msg_id']
169 msg_id = h['msg_id']
170 self.assertTrue(msg_id not in ids)
170 self.assertTrue(msg_id not in ids)
171 ids.add(msg_id)
171 ids.add(msg_id)
172
172
173 def test_feed_identities(self):
173 def test_feed_identities(self):
174 """scrub the front for zmq IDENTITIES"""
174 """scrub the front for zmq IDENTITIES"""
175 theids = "engine client other".split()
175 theids = "engine client other".split()
176 content = dict(code='whoda',stuff=object())
176 content = dict(code='whoda',stuff=object())
177 themsg = self.session.msg('execute',content=content)
177 themsg = self.session.msg('execute',content=content)
178 pmsg = theids
178 pmsg = theids
179
179
180 def test_session_id(self):
180 def test_session_id(self):
181 session = ss.Session()
181 session = ss.Session()
182 # get bs before us
182 # get bs before us
183 bs = session.bsession
183 bs = session.bsession
184 us = session.session
184 us = session.session
185 self.assertEqual(us.encode('ascii'), bs)
185 self.assertEqual(us.encode('ascii'), bs)
186 session = ss.Session()
186 session = ss.Session()
187 # get us before bs
187 # get us before bs
188 us = session.session
188 us = session.session
189 bs = session.bsession
189 bs = session.bsession
190 self.assertEqual(us.encode('ascii'), bs)
190 self.assertEqual(us.encode('ascii'), bs)
191 # change propagates:
191 # change propagates:
192 session.session = 'something else'
192 session.session = 'something else'
193 bs = session.bsession
193 bs = session.bsession
194 us = session.session
194 us = session.session
195 self.assertEqual(us.encode('ascii'), bs)
195 self.assertEqual(us.encode('ascii'), bs)
196 session = ss.Session(session='stuff')
196 session = ss.Session(session='stuff')
197 # get us before bs
197 # get us before bs
198 self.assertEqual(session.bsession, session.session.encode('ascii'))
198 self.assertEqual(session.bsession, session.session.encode('ascii'))
199 self.assertEqual(b'stuff', session.bsession)
199 self.assertEqual(b'stuff', session.bsession)
200
200
201 def test_zero_digest_history(self):
201 def test_zero_digest_history(self):
202 session = ss.Session(digest_history_size=0)
202 session = ss.Session(digest_history_size=0)
203 for i in range(11):
203 for i in range(11):
204 session._add_digest(uuid.uuid4().bytes)
204 session._add_digest(uuid.uuid4().bytes)
205 self.assertEqual(len(session.digest_history), 0)
205 self.assertEqual(len(session.digest_history), 0)
206
206
207 def test_cull_digest_history(self):
207 def test_cull_digest_history(self):
208 session = ss.Session(digest_history_size=100)
208 session = ss.Session(digest_history_size=100)
209 for i in range(100):
209 for i in range(100):
210 session._add_digest(uuid.uuid4().bytes)
210 session._add_digest(uuid.uuid4().bytes)
211 self.assertTrue(len(session.digest_history) == 100)
211 self.assertTrue(len(session.digest_history) == 100)
212 session._add_digest(uuid.uuid4().bytes)
212 session._add_digest(uuid.uuid4().bytes)
213 self.assertTrue(len(session.digest_history) == 91)
213 self.assertTrue(len(session.digest_history) == 91)
214 for i in range(9):
214 for i in range(9):
215 session._add_digest(uuid.uuid4().bytes)
215 session._add_digest(uuid.uuid4().bytes)
216 self.assertTrue(len(session.digest_history) == 100)
216 self.assertTrue(len(session.digest_history) == 100)
217 session._add_digest(uuid.uuid4().bytes)
217 session._add_digest(uuid.uuid4().bytes)
218 self.assertTrue(len(session.digest_history) == 91)
218 self.assertTrue(len(session.digest_history) == 91)
219
219
220 def test_bad_pack(self):
220 def test_bad_pack(self):
221 try:
221 try:
222 session = ss.Session(pack=_bad_packer)
222 session = ss.Session(pack=_bad_packer)
223 except ValueError as e:
223 except ValueError as e:
224 self.assertIn("could not serialize", str(e))
224 self.assertIn("could not serialize", str(e))
225 self.assertIn("don't work", str(e))
225 self.assertIn("don't work", str(e))
226 else:
226 else:
227 self.fail("Should have raised ValueError")
227 self.fail("Should have raised ValueError")
228
228
229 def test_bad_unpack(self):
229 def test_bad_unpack(self):
230 try:
230 try:
231 session = ss.Session(unpack=_bad_unpacker)
231 session = ss.Session(unpack=_bad_unpacker)
232 except ValueError as e:
232 except ValueError as e:
233 self.assertIn("could not handle output", str(e))
233 self.assertIn("could not handle output", str(e))
234 self.assertIn("don't work either", str(e))
234 self.assertIn("don't work either", str(e))
235 else:
235 else:
236 self.fail("Should have raised ValueError")
236 self.fail("Should have raised ValueError")
237
237
238 def test_bad_packer(self):
238 def test_bad_packer(self):
239 try:
239 try:
240 session = ss.Session(packer=__name__ + '._bad_packer')
240 session = ss.Session(packer=__name__ + '._bad_packer')
241 except ValueError as e:
241 except ValueError as e:
242 self.assertIn("could not serialize", str(e))
242 self.assertIn("could not serialize", str(e))
243 self.assertIn("don't work", str(e))
243 self.assertIn("don't work", str(e))
244 else:
244 else:
245 self.fail("Should have raised ValueError")
245 self.fail("Should have raised ValueError")
246
246
247 def test_bad_unpacker(self):
247 def test_bad_unpacker(self):
248 try:
248 try:
249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
249 session = ss.Session(unpacker=__name__ + '._bad_unpacker')
250 except ValueError as e:
250 except ValueError as e:
251 self.assertIn("could not handle output", str(e))
251 self.assertIn("could not handle output", str(e))
252 self.assertIn("don't work either", str(e))
252 self.assertIn("don't work either", str(e))
253 else:
253 else:
254 self.fail("Should have raised ValueError")
254 self.fail("Should have raised ValueError")
255
255
256 def test_bad_roundtrip(self):
256 def test_bad_roundtrip(self):
257 with self.assertRaises(ValueError):
257 with self.assertRaises(ValueError):
258 session = ss.Session(unpack=lambda b: 5)
258 session = ss.Session(unpack=lambda b: 5)
259
259
260 def _datetime_test(self, session):
260 def _datetime_test(self, session):
261 content = dict(t=datetime.now())
261 content = dict(t=datetime.now())
262 metadata = dict(t=datetime.now())
262 metadata = dict(t=datetime.now())
263 p = session.msg('msg')
263 p = session.msg('msg')
264 msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
264 msg = session.msg('msg', content=content, metadata=metadata, parent=p['header'])
265 smsg = session.serialize(msg)
265 smsg = session.serialize(msg)
266 msg2 = session.deserialize(session.feed_identities(smsg)[1])
266 msg2 = session.deserialize(session.feed_identities(smsg)[1])
267 assert isinstance(msg2['header']['date'], datetime)
267 assert isinstance(msg2['header']['date'], datetime)
268 self.assertEqual(msg['header'], msg2['header'])
268 self.assertEqual(msg['header'], msg2['header'])
269 self.assertEqual(msg['parent_header'], msg2['parent_header'])
269 self.assertEqual(msg['parent_header'], msg2['parent_header'])
270 self.assertEqual(msg['parent_header'], msg2['parent_header'])
270 self.assertEqual(msg['parent_header'], msg2['parent_header'])
271 assert isinstance(msg['content']['t'], datetime)
271 assert isinstance(msg['content']['t'], datetime)
272 assert isinstance(msg['metadata']['t'], datetime)
272 assert isinstance(msg['metadata']['t'], datetime)
273 assert isinstance(msg2['content']['t'], string_types)
273 assert isinstance(msg2['content']['t'], string_types)
274 assert isinstance(msg2['metadata']['t'], string_types)
274 assert isinstance(msg2['metadata']['t'], string_types)
275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
275 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
276 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
276 self.assertEqual(msg['content'], jsonutil.extract_dates(msg2['content']))
277
277
278 def test_datetimes(self):
278 def test_datetimes(self):
279 self._datetime_test(self.session)
279 self._datetime_test(self.session)
280
280
281 def test_datetimes_pickle(self):
281 def test_datetimes_pickle(self):
282 session = ss.Session(packer='pickle')
282 session = ss.Session(packer='pickle')
283 self._datetime_test(session)
283 self._datetime_test(session)
284
284
285 @skipif(module_not_available('msgpack'))
285 @skipif(module_not_available('msgpack'))
286 def test_datetimes_msgpack(self):
286 def test_datetimes_msgpack(self):
287 import msgpack
287 import msgpack
288
288
289 session = ss.Session(
289 session = ss.Session(
290 pack=msgpack.packb,
290 pack=msgpack.packb,
291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
291 unpack=lambda buf: msgpack.unpackb(buf, encoding='utf8'),
292 )
292 )
293 self._datetime_test(session)
293 self._datetime_test(session)
294
294
295 def test_send_raw(self):
295 def test_send_raw(self):
296 ctx = zmq.Context.instance()
296 ctx = zmq.Context.instance()
297 A = ctx.socket(zmq.PAIR)
297 A = ctx.socket(zmq.PAIR)
298 B = ctx.socket(zmq.PAIR)
298 B = ctx.socket(zmq.PAIR)
299 A.bind("inproc://test")
299 A.bind("inproc://test")
300 B.connect("inproc://test")
300 B.connect("inproc://test")
301
301
302 msg = self.session.msg('execute', content=dict(a=10))
302 msg = self.session.msg('execute', content=dict(a=10))
303 msg_list = [self.session.pack(msg[part]) for part in
303 msg_list = [self.session.pack(msg[part]) for part in
304 ['header', 'parent_header', 'metadata', 'content']]
304 ['header', 'parent_header', 'metadata', 'content']]
305 self.session.send_raw(A, msg_list, ident=b'foo')
305 self.session.send_raw(A, msg_list, ident=b'foo')
306
306
307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
307 ident, new_msg_list = self.session.feed_identities(B.recv_multipart())
308 new_msg = self.session.deserialize(new_msg_list)
308 new_msg = self.session.deserialize(new_msg_list)
309 self.assertEqual(ident[0], b'foo')
309 self.assertEqual(ident[0], b'foo')
310 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
310 self.assertEqual(new_msg['msg_type'],msg['msg_type'])
311 self.assertEqual(new_msg['header'],msg['header'])
311 self.assertEqual(new_msg['header'],msg['header'])
312 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
312 self.assertEqual(new_msg['parent_header'],msg['parent_header'])
313 self.assertEqual(new_msg['content'],msg['content'])
313 self.assertEqual(new_msg['content'],msg['content'])
314 self.assertEqual(new_msg['metadata'],msg['metadata'])
314 self.assertEqual(new_msg['metadata'],msg['metadata'])
315
315
316 A.close()
316 A.close()
317 B.close()
317 B.close()
318 ctx.term()
318 ctx.term()
@@ -1,547 +1,545 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3 """
3 """
4 The IPython controller application.
4 The IPython controller application.
5
5
6 Authors:
6 Authors:
7
7
8 * Brian Granger
8 * Brian Granger
9 * MinRK
9 * MinRK
10
10
11 """
11 """
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Copyright (C) 2008 The IPython Development Team
14 # Copyright (C) 2008 The IPython Development Team
15 #
15 #
16 # Distributed under the terms of the BSD License. The full license is in
16 # Distributed under the terms of the BSD License. The full license is in
17 # the file COPYING, distributed as part of this software.
17 # the file COPYING, distributed as part of this software.
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19
19
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21 # Imports
21 # Imports
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23
23
24 from __future__ import with_statement
24 from __future__ import with_statement
25
25
26 import json
26 import json
27 import os
27 import os
28 import stat
28 import stat
29 import sys
29 import sys
30
30
31 from multiprocessing import Process
31 from multiprocessing import Process
32 from signal import signal, SIGINT, SIGABRT, SIGTERM
32 from signal import signal, SIGINT, SIGABRT, SIGTERM
33
33
34 import zmq
34 import zmq
35 from zmq.devices import ProcessMonitoredQueue
35 from zmq.devices import ProcessMonitoredQueue
36 from zmq.log.handlers import PUBHandler
36 from zmq.log.handlers import PUBHandler
37
37
38 from IPython.core.profiledir import ProfileDir
38 from IPython.core.profiledir import ProfileDir
39
39
40 from IPython.parallel.apps.baseapp import (
40 from IPython.parallel.apps.baseapp import (
41 BaseParallelApplication,
41 BaseParallelApplication,
42 base_aliases,
42 base_aliases,
43 base_flags,
43 base_flags,
44 catch_config_error,
44 catch_config_error,
45 )
45 )
46 from IPython.utils.importstring import import_item
46 from IPython.utils.importstring import import_item
47 from IPython.utils.localinterfaces import localhost, public_ips
47 from IPython.utils.localinterfaces import localhost, public_ips
48 from IPython.utils.traitlets import Instance, Unicode, Bool, List, Dict, TraitError
48 from IPython.utils.traitlets import Instance, Unicode, Bool, List, Dict, TraitError
49
49
50 from IPython.kernel.zmq.session import (
50 from IPython.kernel.zmq.session import (
51 Session, session_aliases, session_flags, default_secure
51 Session, session_aliases, session_flags,
52 )
52 )
53
53
54 from IPython.parallel.controller.heartmonitor import HeartMonitor
54 from IPython.parallel.controller.heartmonitor import HeartMonitor
55 from IPython.parallel.controller.hub import HubFactory
55 from IPython.parallel.controller.hub import HubFactory
56 from IPython.parallel.controller.scheduler import TaskScheduler,launch_scheduler
56 from IPython.parallel.controller.scheduler import TaskScheduler,launch_scheduler
57 from IPython.parallel.controller.dictdb import DictDB
57 from IPython.parallel.controller.dictdb import DictDB
58
58
59 from IPython.parallel.util import split_url, disambiguate_url, set_hwm
59 from IPython.parallel.util import split_url, disambiguate_url, set_hwm
60
60
61 # conditional import of SQLiteDB / MongoDB backend class
61 # conditional import of SQLiteDB / MongoDB backend class
62 real_dbs = []
62 real_dbs = []
63
63
64 try:
64 try:
65 from IPython.parallel.controller.sqlitedb import SQLiteDB
65 from IPython.parallel.controller.sqlitedb import SQLiteDB
66 except ImportError:
66 except ImportError:
67 pass
67 pass
68 else:
68 else:
69 real_dbs.append(SQLiteDB)
69 real_dbs.append(SQLiteDB)
70
70
71 try:
71 try:
72 from IPython.parallel.controller.mongodb import MongoDB
72 from IPython.parallel.controller.mongodb import MongoDB
73 except ImportError:
73 except ImportError:
74 pass
74 pass
75 else:
75 else:
76 real_dbs.append(MongoDB)
76 real_dbs.append(MongoDB)
77
77
78
78
79
79
80 #-----------------------------------------------------------------------------
80 #-----------------------------------------------------------------------------
81 # Module level variables
81 # Module level variables
82 #-----------------------------------------------------------------------------
82 #-----------------------------------------------------------------------------
83
83
84
84
85 _description = """Start the IPython controller for parallel computing.
85 _description = """Start the IPython controller for parallel computing.
86
86
87 The IPython controller provides a gateway between the IPython engines and
87 The IPython controller provides a gateway between the IPython engines and
88 clients. The controller needs to be started before the engines and can be
88 clients. The controller needs to be started before the engines and can be
89 configured using command line options or using a cluster directory. Cluster
89 configured using command line options or using a cluster directory. Cluster
90 directories contain config, log and security files and are usually located in
90 directories contain config, log and security files and are usually located in
91 your ipython directory and named as "profile_name". See the `profile`
91 your ipython directory and named as "profile_name". See the `profile`
92 and `profile-dir` options for details.
92 and `profile-dir` options for details.
93 """
93 """
94
94
95 _examples = """
95 _examples = """
96 ipcontroller --ip=192.168.0.1 --port=1000 # listen on ip, port for engines
96 ipcontroller --ip=192.168.0.1 --port=1000 # listen on ip, port for engines
97 ipcontroller --scheme=pure # use the pure zeromq scheduler
97 ipcontroller --scheme=pure # use the pure zeromq scheduler
98 """
98 """
99
99
100
100
101 #-----------------------------------------------------------------------------
101 #-----------------------------------------------------------------------------
102 # The main application
102 # The main application
103 #-----------------------------------------------------------------------------
103 #-----------------------------------------------------------------------------
104 flags = {}
104 flags = {}
105 flags.update(base_flags)
105 flags.update(base_flags)
106 flags.update({
106 flags.update({
107 'usethreads' : ( {'IPControllerApp' : {'use_threads' : True}},
107 'usethreads' : ( {'IPControllerApp' : {'use_threads' : True}},
108 'Use threads instead of processes for the schedulers'),
108 'Use threads instead of processes for the schedulers'),
109 'sqlitedb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.sqlitedb.SQLiteDB'}},
109 'sqlitedb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.sqlitedb.SQLiteDB'}},
110 'use the SQLiteDB backend'),
110 'use the SQLiteDB backend'),
111 'mongodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.mongodb.MongoDB'}},
111 'mongodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.mongodb.MongoDB'}},
112 'use the MongoDB backend'),
112 'use the MongoDB backend'),
113 'dictdb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.DictDB'}},
113 'dictdb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.DictDB'}},
114 'use the in-memory DictDB backend'),
114 'use the in-memory DictDB backend'),
115 'nodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.NoDB'}},
115 'nodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.NoDB'}},
116 """use dummy DB backend, which doesn't store any information.
116 """use dummy DB backend, which doesn't store any information.
117
117
118 This is the default as of IPython 0.13.
118 This is the default as of IPython 0.13.
119
119
120 To enable delayed or repeated retrieval of results from the Hub,
120 To enable delayed or repeated retrieval of results from the Hub,
121 select one of the true db backends.
121 select one of the true db backends.
122 """),
122 """),
123 'reuse' : ({'IPControllerApp' : {'reuse_files' : True}},
123 'reuse' : ({'IPControllerApp' : {'reuse_files' : True}},
124 'reuse existing json connection files'),
124 'reuse existing json connection files'),
125 'restore' : ({'IPControllerApp' : {'restore_engines' : True, 'reuse_files' : True}},
125 'restore' : ({'IPControllerApp' : {'restore_engines' : True, 'reuse_files' : True}},
126 'Attempt to restore engines from a JSON file. '
126 'Attempt to restore engines from a JSON file. '
127 'For use when resuming a crashed controller'),
127 'For use when resuming a crashed controller'),
128 })
128 })
129
129
130 flags.update(session_flags)
130 flags.update(session_flags)
131
131
132 aliases = dict(
132 aliases = dict(
133 ssh = 'IPControllerApp.ssh_server',
133 ssh = 'IPControllerApp.ssh_server',
134 enginessh = 'IPControllerApp.engine_ssh_server',
134 enginessh = 'IPControllerApp.engine_ssh_server',
135 location = 'IPControllerApp.location',
135 location = 'IPControllerApp.location',
136
136
137 url = 'HubFactory.url',
137 url = 'HubFactory.url',
138 ip = 'HubFactory.ip',
138 ip = 'HubFactory.ip',
139 transport = 'HubFactory.transport',
139 transport = 'HubFactory.transport',
140 port = 'HubFactory.regport',
140 port = 'HubFactory.regport',
141
141
142 ping = 'HeartMonitor.period',
142 ping = 'HeartMonitor.period',
143
143
144 scheme = 'TaskScheduler.scheme_name',
144 scheme = 'TaskScheduler.scheme_name',
145 hwm = 'TaskScheduler.hwm',
145 hwm = 'TaskScheduler.hwm',
146 )
146 )
147 aliases.update(base_aliases)
147 aliases.update(base_aliases)
148 aliases.update(session_aliases)
148 aliases.update(session_aliases)
149
149
150 class IPControllerApp(BaseParallelApplication):
150 class IPControllerApp(BaseParallelApplication):
151
151
152 name = u'ipcontroller'
152 name = u'ipcontroller'
153 description = _description
153 description = _description
154 examples = _examples
154 examples = _examples
155 classes = [ProfileDir, Session, HubFactory, TaskScheduler, HeartMonitor, DictDB] + real_dbs
155 classes = [ProfileDir, Session, HubFactory, TaskScheduler, HeartMonitor, DictDB] + real_dbs
156
156
157 # change default to True
157 # change default to True
158 auto_create = Bool(True, config=True,
158 auto_create = Bool(True, config=True,
159 help="""Whether to create profile dir if it doesn't exist.""")
159 help="""Whether to create profile dir if it doesn't exist.""")
160
160
161 reuse_files = Bool(False, config=True,
161 reuse_files = Bool(False, config=True,
162 help="""Whether to reuse existing json connection files.
162 help="""Whether to reuse existing json connection files.
163 If False, connection files will be removed on a clean exit.
163 If False, connection files will be removed on a clean exit.
164 """
164 """
165 )
165 )
166 restore_engines = Bool(False, config=True,
166 restore_engines = Bool(False, config=True,
167 help="""Reload engine state from JSON file
167 help="""Reload engine state from JSON file
168 """
168 """
169 )
169 )
170 ssh_server = Unicode(u'', config=True,
170 ssh_server = Unicode(u'', config=True,
171 help="""ssh url for clients to use when connecting to the Controller
171 help="""ssh url for clients to use when connecting to the Controller
172 processes. It should be of the form: [user@]server[:port]. The
172 processes. It should be of the form: [user@]server[:port]. The
173 Controller's listening addresses must be accessible from the ssh server""",
173 Controller's listening addresses must be accessible from the ssh server""",
174 )
174 )
175 engine_ssh_server = Unicode(u'', config=True,
175 engine_ssh_server = Unicode(u'', config=True,
176 help="""ssh url for engines to use when connecting to the Controller
176 help="""ssh url for engines to use when connecting to the Controller
177 processes. It should be of the form: [user@]server[:port]. The
177 processes. It should be of the form: [user@]server[:port]. The
178 Controller's listening addresses must be accessible from the ssh server""",
178 Controller's listening addresses must be accessible from the ssh server""",
179 )
179 )
180 location = Unicode(u'', config=True,
180 location = Unicode(u'', config=True,
181 help="""The external IP or domain name of the Controller, used for disambiguating
181 help="""The external IP or domain name of the Controller, used for disambiguating
182 engine and client connections.""",
182 engine and client connections.""",
183 )
183 )
184 import_statements = List([], config=True,
184 import_statements = List([], config=True,
185 help="import statements to be run at startup. Necessary in some environments"
185 help="import statements to be run at startup. Necessary in some environments"
186 )
186 )
187
187
188 use_threads = Bool(False, config=True,
188 use_threads = Bool(False, config=True,
189 help='Use threads instead of processes for the schedulers',
189 help='Use threads instead of processes for the schedulers',
190 )
190 )
191
191
192 engine_json_file = Unicode('ipcontroller-engine.json', config=True,
192 engine_json_file = Unicode('ipcontroller-engine.json', config=True,
193 help="JSON filename where engine connection info will be stored.")
193 help="JSON filename where engine connection info will be stored.")
194 client_json_file = Unicode('ipcontroller-client.json', config=True,
194 client_json_file = Unicode('ipcontroller-client.json', config=True,
195 help="JSON filename where client connection info will be stored.")
195 help="JSON filename where client connection info will be stored.")
196
196
197 def _cluster_id_changed(self, name, old, new):
197 def _cluster_id_changed(self, name, old, new):
198 super(IPControllerApp, self)._cluster_id_changed(name, old, new)
198 super(IPControllerApp, self)._cluster_id_changed(name, old, new)
199 self.engine_json_file = "%s-engine.json" % self.name
199 self.engine_json_file = "%s-engine.json" % self.name
200 self.client_json_file = "%s-client.json" % self.name
200 self.client_json_file = "%s-client.json" % self.name
201
201
202
202
203 # internal
203 # internal
204 children = List()
204 children = List()
205 mq_class = Unicode('zmq.devices.ProcessMonitoredQueue')
205 mq_class = Unicode('zmq.devices.ProcessMonitoredQueue')
206
206
207 def _use_threads_changed(self, name, old, new):
207 def _use_threads_changed(self, name, old, new):
208 self.mq_class = 'zmq.devices.%sMonitoredQueue'%('Thread' if new else 'Process')
208 self.mq_class = 'zmq.devices.%sMonitoredQueue'%('Thread' if new else 'Process')
209
209
210 write_connection_files = Bool(True,
210 write_connection_files = Bool(True,
211 help="""Whether to write connection files to disk.
211 help="""Whether to write connection files to disk.
212 True in all cases other than runs with `reuse_files=True` *after the first*
212 True in all cases other than runs with `reuse_files=True` *after the first*
213 """
213 """
214 )
214 )
215
215
216 aliases = Dict(aliases)
216 aliases = Dict(aliases)
217 flags = Dict(flags)
217 flags = Dict(flags)
218
218
219
219
220 def save_connection_dict(self, fname, cdict):
220 def save_connection_dict(self, fname, cdict):
221 """save a connection dict to json file."""
221 """save a connection dict to json file."""
222 c = self.config
222 c = self.config
223 url = cdict['registration']
223 url = cdict['registration']
224 location = cdict['location']
224 location = cdict['location']
225
225
226 if not location:
226 if not location:
227 if public_ips():
227 if public_ips():
228 location = public_ips()[-1]
228 location = public_ips()[-1]
229 else:
229 else:
230 self.log.warn("Could not identify this machine's IP, assuming %s."
230 self.log.warn("Could not identify this machine's IP, assuming %s."
231 " You may need to specify '--location=<external_ip_address>' to help"
231 " You may need to specify '--location=<external_ip_address>' to help"
232 " IPython decide when to connect via loopback." % localhost() )
232 " IPython decide when to connect via loopback." % localhost() )
233 location = localhost()
233 location = localhost()
234 cdict['location'] = location
234 cdict['location'] = location
235 fname = os.path.join(self.profile_dir.security_dir, fname)
235 fname = os.path.join(self.profile_dir.security_dir, fname)
236 self.log.info("writing connection info to %s", fname)
236 self.log.info("writing connection info to %s", fname)
237 with open(fname, 'w') as f:
237 with open(fname, 'w') as f:
238 f.write(json.dumps(cdict, indent=2))
238 f.write(json.dumps(cdict, indent=2))
239 os.chmod(fname, stat.S_IRUSR|stat.S_IWUSR)
239 os.chmod(fname, stat.S_IRUSR|stat.S_IWUSR)
240
240
241 def load_config_from_json(self):
241 def load_config_from_json(self):
242 """load config from existing json connector files."""
242 """load config from existing json connector files."""
243 c = self.config
243 c = self.config
244 self.log.debug("loading config from JSON")
244 self.log.debug("loading config from JSON")
245
245
246 # load engine config
246 # load engine config
247
247
248 fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
248 fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
249 self.log.info("loading connection info from %s", fname)
249 self.log.info("loading connection info from %s", fname)
250 with open(fname) as f:
250 with open(fname) as f:
251 ecfg = json.loads(f.read())
251 ecfg = json.loads(f.read())
252
252
253 # json gives unicode, Session.key wants bytes
253 # json gives unicode, Session.key wants bytes
254 c.Session.key = ecfg['key'].encode('ascii')
254 c.Session.key = ecfg['key'].encode('ascii')
255
255
256 xport,ip = ecfg['interface'].split('://')
256 xport,ip = ecfg['interface'].split('://')
257
257
258 c.HubFactory.engine_ip = ip
258 c.HubFactory.engine_ip = ip
259 c.HubFactory.engine_transport = xport
259 c.HubFactory.engine_transport = xport
260
260
261 self.location = ecfg['location']
261 self.location = ecfg['location']
262 if not self.engine_ssh_server:
262 if not self.engine_ssh_server:
263 self.engine_ssh_server = ecfg['ssh']
263 self.engine_ssh_server = ecfg['ssh']
264
264
265 # load client config
265 # load client config
266
266
267 fname = os.path.join(self.profile_dir.security_dir, self.client_json_file)
267 fname = os.path.join(self.profile_dir.security_dir, self.client_json_file)
268 self.log.info("loading connection info from %s", fname)
268 self.log.info("loading connection info from %s", fname)
269 with open(fname) as f:
269 with open(fname) as f:
270 ccfg = json.loads(f.read())
270 ccfg = json.loads(f.read())
271
271
272 for key in ('key', 'registration', 'pack', 'unpack', 'signature_scheme'):
272 for key in ('key', 'registration', 'pack', 'unpack', 'signature_scheme'):
273 assert ccfg[key] == ecfg[key], "mismatch between engine and client info: %r" % key
273 assert ccfg[key] == ecfg[key], "mismatch between engine and client info: %r" % key
274
274
275 xport,addr = ccfg['interface'].split('://')
275 xport,addr = ccfg['interface'].split('://')
276
276
277 c.HubFactory.client_transport = xport
277 c.HubFactory.client_transport = xport
278 c.HubFactory.client_ip = ip
278 c.HubFactory.client_ip = ip
279 if not self.ssh_server:
279 if not self.ssh_server:
280 self.ssh_server = ccfg['ssh']
280 self.ssh_server = ccfg['ssh']
281
281
282 # load port config:
282 # load port config:
283 c.HubFactory.regport = ecfg['registration']
283 c.HubFactory.regport = ecfg['registration']
284 c.HubFactory.hb = (ecfg['hb_ping'], ecfg['hb_pong'])
284 c.HubFactory.hb = (ecfg['hb_ping'], ecfg['hb_pong'])
285 c.HubFactory.control = (ccfg['control'], ecfg['control'])
285 c.HubFactory.control = (ccfg['control'], ecfg['control'])
286 c.HubFactory.mux = (ccfg['mux'], ecfg['mux'])
286 c.HubFactory.mux = (ccfg['mux'], ecfg['mux'])
287 c.HubFactory.task = (ccfg['task'], ecfg['task'])
287 c.HubFactory.task = (ccfg['task'], ecfg['task'])
288 c.HubFactory.iopub = (ccfg['iopub'], ecfg['iopub'])
288 c.HubFactory.iopub = (ccfg['iopub'], ecfg['iopub'])
289 c.HubFactory.notifier_port = ccfg['notification']
289 c.HubFactory.notifier_port = ccfg['notification']
290
290
291 def cleanup_connection_files(self):
291 def cleanup_connection_files(self):
292 if self.reuse_files:
292 if self.reuse_files:
293 self.log.debug("leaving JSON connection files for reuse")
293 self.log.debug("leaving JSON connection files for reuse")
294 return
294 return
295 self.log.debug("cleaning up JSON connection files")
295 self.log.debug("cleaning up JSON connection files")
296 for f in (self.client_json_file, self.engine_json_file):
296 for f in (self.client_json_file, self.engine_json_file):
297 f = os.path.join(self.profile_dir.security_dir, f)
297 f = os.path.join(self.profile_dir.security_dir, f)
298 try:
298 try:
299 os.remove(f)
299 os.remove(f)
300 except Exception as e:
300 except Exception as e:
301 self.log.error("Failed to cleanup connection file: %s", e)
301 self.log.error("Failed to cleanup connection file: %s", e)
302 else:
302 else:
303 self.log.debug(u"removed %s", f)
303 self.log.debug(u"removed %s", f)
304
304
305 def load_secondary_config(self):
305 def load_secondary_config(self):
306 """secondary config, loading from JSON and setting defaults"""
306 """secondary config, loading from JSON and setting defaults"""
307 if self.reuse_files:
307 if self.reuse_files:
308 try:
308 try:
309 self.load_config_from_json()
309 self.load_config_from_json()
310 except (AssertionError,IOError) as e:
310 except (AssertionError,IOError) as e:
311 self.log.error("Could not load config from JSON: %s" % e)
311 self.log.error("Could not load config from JSON: %s" % e)
312 else:
312 else:
313 # successfully loaded config from JSON, and reuse=True
313 # successfully loaded config from JSON, and reuse=True
314 # no need to wite back the same file
314 # no need to wite back the same file
315 self.write_connection_files = False
315 self.write_connection_files = False
316
316
317 # switch Session.key default to secure
318 default_secure(self.config)
319 self.log.debug("Config changed")
317 self.log.debug("Config changed")
320 self.log.debug(repr(self.config))
318 self.log.debug(repr(self.config))
321
319
322 def init_hub(self):
320 def init_hub(self):
323 c = self.config
321 c = self.config
324
322
325 self.do_import_statements()
323 self.do_import_statements()
326
324
327 try:
325 try:
328 self.factory = HubFactory(config=c, log=self.log)
326 self.factory = HubFactory(config=c, log=self.log)
329 # self.start_logging()
327 # self.start_logging()
330 self.factory.init_hub()
328 self.factory.init_hub()
331 except TraitError:
329 except TraitError:
332 raise
330 raise
333 except Exception:
331 except Exception:
334 self.log.error("Couldn't construct the Controller", exc_info=True)
332 self.log.error("Couldn't construct the Controller", exc_info=True)
335 self.exit(1)
333 self.exit(1)
336
334
337 if self.write_connection_files:
335 if self.write_connection_files:
338 # save to new json config files
336 # save to new json config files
339 f = self.factory
337 f = self.factory
340 base = {
338 base = {
341 'key' : f.session.key.decode('ascii'),
339 'key' : f.session.key.decode('ascii'),
342 'location' : self.location,
340 'location' : self.location,
343 'pack' : f.session.packer,
341 'pack' : f.session.packer,
344 'unpack' : f.session.unpacker,
342 'unpack' : f.session.unpacker,
345 'signature_scheme' : f.session.signature_scheme,
343 'signature_scheme' : f.session.signature_scheme,
346 }
344 }
347
345
348 cdict = {'ssh' : self.ssh_server}
346 cdict = {'ssh' : self.ssh_server}
349 cdict.update(f.client_info)
347 cdict.update(f.client_info)
350 cdict.update(base)
348 cdict.update(base)
351 self.save_connection_dict(self.client_json_file, cdict)
349 self.save_connection_dict(self.client_json_file, cdict)
352
350
353 edict = {'ssh' : self.engine_ssh_server}
351 edict = {'ssh' : self.engine_ssh_server}
354 edict.update(f.engine_info)
352 edict.update(f.engine_info)
355 edict.update(base)
353 edict.update(base)
356 self.save_connection_dict(self.engine_json_file, edict)
354 self.save_connection_dict(self.engine_json_file, edict)
357
355
358 fname = "engines%s.json" % self.cluster_id
356 fname = "engines%s.json" % self.cluster_id
359 self.factory.hub.engine_state_file = os.path.join(self.profile_dir.log_dir, fname)
357 self.factory.hub.engine_state_file = os.path.join(self.profile_dir.log_dir, fname)
360 if self.restore_engines:
358 if self.restore_engines:
361 self.factory.hub._load_engine_state()
359 self.factory.hub._load_engine_state()
362
360
363 def init_schedulers(self):
361 def init_schedulers(self):
364 children = self.children
362 children = self.children
365 mq = import_item(str(self.mq_class))
363 mq = import_item(str(self.mq_class))
366
364
367 f = self.factory
365 f = self.factory
368 ident = f.session.bsession
366 ident = f.session.bsession
369 # disambiguate url, in case of *
367 # disambiguate url, in case of *
370 monitor_url = disambiguate_url(f.monitor_url)
368 monitor_url = disambiguate_url(f.monitor_url)
371 # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
369 # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
372 # IOPub relay (in a Process)
370 # IOPub relay (in a Process)
373 q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A',b'iopub')
371 q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A',b'iopub')
374 q.bind_in(f.client_url('iopub'))
372 q.bind_in(f.client_url('iopub'))
375 q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
373 q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
376 q.bind_out(f.engine_url('iopub'))
374 q.bind_out(f.engine_url('iopub'))
377 q.setsockopt_out(zmq.SUBSCRIBE, b'')
375 q.setsockopt_out(zmq.SUBSCRIBE, b'')
378 q.connect_mon(monitor_url)
376 q.connect_mon(monitor_url)
379 q.daemon=True
377 q.daemon=True
380 children.append(q)
378 children.append(q)
381
379
382 # Multiplexer Queue (in a Process)
380 # Multiplexer Queue (in a Process)
383 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
381 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
384
382
385 q.bind_in(f.client_url('mux'))
383 q.bind_in(f.client_url('mux'))
386 q.setsockopt_in(zmq.IDENTITY, b'mux_in')
384 q.setsockopt_in(zmq.IDENTITY, b'mux_in')
387 q.bind_out(f.engine_url('mux'))
385 q.bind_out(f.engine_url('mux'))
388 q.setsockopt_out(zmq.IDENTITY, b'mux_out')
386 q.setsockopt_out(zmq.IDENTITY, b'mux_out')
389 q.connect_mon(monitor_url)
387 q.connect_mon(monitor_url)
390 q.daemon=True
388 q.daemon=True
391 children.append(q)
389 children.append(q)
392
390
393 # Control Queue (in a Process)
391 # Control Queue (in a Process)
394 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
392 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
395 q.bind_in(f.client_url('control'))
393 q.bind_in(f.client_url('control'))
396 q.setsockopt_in(zmq.IDENTITY, b'control_in')
394 q.setsockopt_in(zmq.IDENTITY, b'control_in')
397 q.bind_out(f.engine_url('control'))
395 q.bind_out(f.engine_url('control'))
398 q.setsockopt_out(zmq.IDENTITY, b'control_out')
396 q.setsockopt_out(zmq.IDENTITY, b'control_out')
399 q.connect_mon(monitor_url)
397 q.connect_mon(monitor_url)
400 q.daemon=True
398 q.daemon=True
401 children.append(q)
399 children.append(q)
402 if 'TaskScheduler.scheme_name' in self.config:
400 if 'TaskScheduler.scheme_name' in self.config:
403 scheme = self.config.TaskScheduler.scheme_name
401 scheme = self.config.TaskScheduler.scheme_name
404 else:
402 else:
405 scheme = TaskScheduler.scheme_name.get_default_value()
403 scheme = TaskScheduler.scheme_name.get_default_value()
406 # Task Queue (in a Process)
404 # Task Queue (in a Process)
407 if scheme == 'pure':
405 if scheme == 'pure':
408 self.log.warn("task::using pure DEALER Task scheduler")
406 self.log.warn("task::using pure DEALER Task scheduler")
409 q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
407 q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
410 # q.setsockopt_out(zmq.HWM, hub.hwm)
408 # q.setsockopt_out(zmq.HWM, hub.hwm)
411 q.bind_in(f.client_url('task'))
409 q.bind_in(f.client_url('task'))
412 q.setsockopt_in(zmq.IDENTITY, b'task_in')
410 q.setsockopt_in(zmq.IDENTITY, b'task_in')
413 q.bind_out(f.engine_url('task'))
411 q.bind_out(f.engine_url('task'))
414 q.setsockopt_out(zmq.IDENTITY, b'task_out')
412 q.setsockopt_out(zmq.IDENTITY, b'task_out')
415 q.connect_mon(monitor_url)
413 q.connect_mon(monitor_url)
416 q.daemon=True
414 q.daemon=True
417 children.append(q)
415 children.append(q)
418 elif scheme == 'none':
416 elif scheme == 'none':
419 self.log.warn("task::using no Task scheduler")
417 self.log.warn("task::using no Task scheduler")
420
418
421 else:
419 else:
422 self.log.info("task::using Python %s Task scheduler"%scheme)
420 self.log.info("task::using Python %s Task scheduler"%scheme)
423 sargs = (f.client_url('task'), f.engine_url('task'),
421 sargs = (f.client_url('task'), f.engine_url('task'),
424 monitor_url, disambiguate_url(f.client_url('notification')),
422 monitor_url, disambiguate_url(f.client_url('notification')),
425 disambiguate_url(f.client_url('registration')),
423 disambiguate_url(f.client_url('registration')),
426 )
424 )
427 kwargs = dict(logname='scheduler', loglevel=self.log_level,
425 kwargs = dict(logname='scheduler', loglevel=self.log_level,
428 log_url = self.log_url, config=dict(self.config))
426 log_url = self.log_url, config=dict(self.config))
429 if 'Process' in self.mq_class:
427 if 'Process' in self.mq_class:
430 # run the Python scheduler in a Process
428 # run the Python scheduler in a Process
431 q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
429 q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
432 q.daemon=True
430 q.daemon=True
433 children.append(q)
431 children.append(q)
434 else:
432 else:
435 # single-threaded Controller
433 # single-threaded Controller
436 kwargs['in_thread'] = True
434 kwargs['in_thread'] = True
437 launch_scheduler(*sargs, **kwargs)
435 launch_scheduler(*sargs, **kwargs)
438
436
439 # set unlimited HWM for all relay devices
437 # set unlimited HWM for all relay devices
440 if hasattr(zmq, 'SNDHWM'):
438 if hasattr(zmq, 'SNDHWM'):
441 q = children[0]
439 q = children[0]
442 q.setsockopt_in(zmq.RCVHWM, 0)
440 q.setsockopt_in(zmq.RCVHWM, 0)
443 q.setsockopt_out(zmq.SNDHWM, 0)
441 q.setsockopt_out(zmq.SNDHWM, 0)
444
442
445 for q in children[1:]:
443 for q in children[1:]:
446 if not hasattr(q, 'setsockopt_in'):
444 if not hasattr(q, 'setsockopt_in'):
447 continue
445 continue
448 q.setsockopt_in(zmq.SNDHWM, 0)
446 q.setsockopt_in(zmq.SNDHWM, 0)
449 q.setsockopt_in(zmq.RCVHWM, 0)
447 q.setsockopt_in(zmq.RCVHWM, 0)
450 q.setsockopt_out(zmq.SNDHWM, 0)
448 q.setsockopt_out(zmq.SNDHWM, 0)
451 q.setsockopt_out(zmq.RCVHWM, 0)
449 q.setsockopt_out(zmq.RCVHWM, 0)
452 q.setsockopt_mon(zmq.SNDHWM, 0)
450 q.setsockopt_mon(zmq.SNDHWM, 0)
453
451
454
452
455 def terminate_children(self):
453 def terminate_children(self):
456 child_procs = []
454 child_procs = []
457 for child in self.children:
455 for child in self.children:
458 if isinstance(child, ProcessMonitoredQueue):
456 if isinstance(child, ProcessMonitoredQueue):
459 child_procs.append(child.launcher)
457 child_procs.append(child.launcher)
460 elif isinstance(child, Process):
458 elif isinstance(child, Process):
461 child_procs.append(child)
459 child_procs.append(child)
462 if child_procs:
460 if child_procs:
463 self.log.critical("terminating children...")
461 self.log.critical("terminating children...")
464 for child in child_procs:
462 for child in child_procs:
465 try:
463 try:
466 child.terminate()
464 child.terminate()
467 except OSError:
465 except OSError:
468 # already dead
466 # already dead
469 pass
467 pass
470
468
471 def handle_signal(self, sig, frame):
469 def handle_signal(self, sig, frame):
472 self.log.critical("Received signal %i, shutting down", sig)
470 self.log.critical("Received signal %i, shutting down", sig)
473 self.terminate_children()
471 self.terminate_children()
474 self.loop.stop()
472 self.loop.stop()
475
473
476 def init_signal(self):
474 def init_signal(self):
477 for sig in (SIGINT, SIGABRT, SIGTERM):
475 for sig in (SIGINT, SIGABRT, SIGTERM):
478 signal(sig, self.handle_signal)
476 signal(sig, self.handle_signal)
479
477
480 def do_import_statements(self):
478 def do_import_statements(self):
481 statements = self.import_statements
479 statements = self.import_statements
482 for s in statements:
480 for s in statements:
483 try:
481 try:
484 self.log.msg("Executing statement: '%s'" % s)
482 self.log.msg("Executing statement: '%s'" % s)
485 exec(s, globals(), locals())
483 exec(s, globals(), locals())
486 except:
484 except:
487 self.log.msg("Error running statement: %s" % s)
485 self.log.msg("Error running statement: %s" % s)
488
486
489 def forward_logging(self):
487 def forward_logging(self):
490 if self.log_url:
488 if self.log_url:
491 self.log.info("Forwarding logging to %s"%self.log_url)
489 self.log.info("Forwarding logging to %s"%self.log_url)
492 context = zmq.Context.instance()
490 context = zmq.Context.instance()
493 lsock = context.socket(zmq.PUB)
491 lsock = context.socket(zmq.PUB)
494 lsock.connect(self.log_url)
492 lsock.connect(self.log_url)
495 handler = PUBHandler(lsock)
493 handler = PUBHandler(lsock)
496 handler.root_topic = 'controller'
494 handler.root_topic = 'controller'
497 handler.setLevel(self.log_level)
495 handler.setLevel(self.log_level)
498 self.log.addHandler(handler)
496 self.log.addHandler(handler)
499
497
500 @catch_config_error
498 @catch_config_error
501 def initialize(self, argv=None):
499 def initialize(self, argv=None):
502 super(IPControllerApp, self).initialize(argv)
500 super(IPControllerApp, self).initialize(argv)
503 self.forward_logging()
501 self.forward_logging()
504 self.load_secondary_config()
502 self.load_secondary_config()
505 self.init_hub()
503 self.init_hub()
506 self.init_schedulers()
504 self.init_schedulers()
507
505
508 def start(self):
506 def start(self):
509 # Start the subprocesses:
507 # Start the subprocesses:
510 self.factory.start()
508 self.factory.start()
511 # children must be started before signals are setup,
509 # children must be started before signals are setup,
512 # otherwise signal-handling will fire multiple times
510 # otherwise signal-handling will fire multiple times
513 for child in self.children:
511 for child in self.children:
514 child.start()
512 child.start()
515 self.init_signal()
513 self.init_signal()
516
514
517 self.write_pid_file(overwrite=True)
515 self.write_pid_file(overwrite=True)
518
516
519 try:
517 try:
520 self.factory.loop.start()
518 self.factory.loop.start()
521 except KeyboardInterrupt:
519 except KeyboardInterrupt:
522 self.log.critical("Interrupted, Exiting...\n")
520 self.log.critical("Interrupted, Exiting...\n")
523 finally:
521 finally:
524 self.cleanup_connection_files()
522 self.cleanup_connection_files()
525
523
526
524
527 def launch_new_instance(*args, **kwargs):
525 def launch_new_instance(*args, **kwargs):
528 """Create and run the IPython controller"""
526 """Create and run the IPython controller"""
529 if sys.platform == 'win32':
527 if sys.platform == 'win32':
530 # make sure we don't get called from a multiprocessing subprocess
528 # make sure we don't get called from a multiprocessing subprocess
531 # this can result in infinite Controllers being started on Windows
529 # this can result in infinite Controllers being started on Windows
532 # which doesn't have a proper fork, so multiprocessing is wonky
530 # which doesn't have a proper fork, so multiprocessing is wonky
533
531
534 # this only comes up when IPython has been installed using vanilla
532 # this only comes up when IPython has been installed using vanilla
535 # setuptools, and *not* distribute.
533 # setuptools, and *not* distribute.
536 import multiprocessing
534 import multiprocessing
537 p = multiprocessing.current_process()
535 p = multiprocessing.current_process()
538 # the main process has name 'MainProcess'
536 # the main process has name 'MainProcess'
539 # subprocesses will have names like 'Process-1'
537 # subprocesses will have names like 'Process-1'
540 if p.name != 'MainProcess':
538 if p.name != 'MainProcess':
541 # we are a subprocess, don't start another Controller!
539 # we are a subprocess, don't start another Controller!
542 return
540 return
543 return IPControllerApp.launch_instance(*args, **kwargs)
541 return IPControllerApp.launch_instance(*args, **kwargs)
544
542
545
543
546 if __name__ == '__main__':
544 if __name__ == '__main__':
547 launch_new_instance()
545 launch_new_instance()
General Comments 0
You need to be logged in to leave comments. Login now