avoid executing code in utils.localinterfaces at import time...
MinRK
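The change replaces the import-time `LOCALHOST` constant from
`IPython.utils.localinterfaces` with a `localhost()` function in both files
below, so the network-interface lookup happens on first use rather than as a
side effect of `import`. A minimal sketch of the deferred pattern (the names
mirror the diff; the caching detail is an assumption about the idea, not a
claim about the real module):

    # sketch: defer an expensive lookup from import time to first use
    _localhost = None

    def localhost():
        """Return the IP for localhost, computing and caching it lazily."""
        global _localhost
        if _localhost is None:
            # stand-in for the real interface enumeration, which is the
            # expensive step this commit keeps out of import time
            _localhost = '127.0.0.1'
        return _localhost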
@@ -1,393 +1,393 @@
""" A minimal application base mixin for all ZMQ based IPython frontends.

This is not a complete console app, as subprocess will not be able to receive
input, there is no real readline support, among other limitations. This is a
refactoring of what used to be the IPython/qt/console/qtconsoleapp.py

Authors:

* Evan Patterson
* Min RK
* Erik Tollerud
* Fernando Perez
* Bussonnier Matthias
* Thomas Kluyver
* Paul Ivanov

"""

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# stdlib imports
import atexit
import json
import os
import signal
import sys
import uuid


# Local imports
from IPython.config.application import boolean_flag
from IPython.core.profiledir import ProfileDir
from IPython.kernel.blocking import BlockingKernelClient
from IPython.kernel import KernelManager
from IPython.kernel import tunnel_to_kernel, find_connection_file, swallow_argv
from IPython.utils.path import filefind
from IPython.utils.py3compat import str_to_bytes
from IPython.utils.traitlets import (
    Dict, List, Unicode, CUnicode, Int, CBool, Any
)
from IPython.kernel.zmq.kernelapp import (
    kernel_flags,
    kernel_aliases,
    IPKernelApp
)
from IPython.kernel.zmq.session import Session, default_secure
from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
from IPython.kernel.connect import ConnectionFileMixin

#-----------------------------------------------------------------------------
# Network Constants
#-----------------------------------------------------------------------------

-from IPython.utils.localinterfaces import LOCALHOST
+from IPython.utils.localinterfaces import localhost

#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------


#-----------------------------------------------------------------------------
# Aliases and Flags
#-----------------------------------------------------------------------------

flags = dict(kernel_flags)

# the flags that are specific to the frontend
# these must be scrubbed before being passed to the kernel,
# or it will raise an error on unrecognized flags
app_flags = {
    'existing' : ({'IPythonConsoleApp' : {'existing' : 'kernel*.json'}},
            "Connect to an existing kernel. If no argument specified, guess most recent"),
}
app_flags.update(boolean_flag(
    'confirm-exit', 'IPythonConsoleApp.confirm_exit',
    """Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
       to force a direct exit without any confirmation.
    """,
    """Don't prompt the user when exiting. This will terminate the kernel
       if it is owned by the frontend, and leave it alive if it is external.
    """
))
flags.update(app_flags)

aliases = dict(kernel_aliases)

# also scrub aliases from the frontend
app_aliases = dict(
    ip = 'IPythonConsoleApp.ip',
    transport = 'IPythonConsoleApp.transport',
    hb = 'IPythonConsoleApp.hb_port',
    shell = 'IPythonConsoleApp.shell_port',
    iopub = 'IPythonConsoleApp.iopub_port',
    stdin = 'IPythonConsoleApp.stdin_port',
    existing = 'IPythonConsoleApp.existing',
    f = 'IPythonConsoleApp.connection_file',


    ssh = 'IPythonConsoleApp.sshserver',
)
aliases.update(app_aliases)

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# IPythonConsole
#-----------------------------------------------------------------------------

classes = [IPKernelApp, ZMQInteractiveShell, KernelManager, ProfileDir, Session]

try:
    from IPython.kernel.zmq.pylab.backend_inline import InlineBackend
except ImportError:
    pass
else:
    classes.append(InlineBackend)

class IPythonConsoleApp(ConnectionFileMixin):
    name = 'ipython-console-mixin'

    description = """
        The IPython Mixin Console.

        This class contains the common portions of console client (QtConsole,
        ZMQ-based terminal console, etc). It is not a full console, in that
        launched terminal subprocesses will not be able to accept input.

        The Console using this mixin supports various extra features beyond
        the single-process Terminal IPython shell, such as connecting to an
        existing kernel, via:

            ipython <appname> --existing

        as well as tunneling via SSH

    """

    classes = classes
    flags = Dict(flags)
    aliases = Dict(aliases)
    kernel_manager_class = KernelManager
    kernel_client_class = BlockingKernelClient

    kernel_argv = List(Unicode)
    # frontend flags&aliases to be stripped when building kernel_argv
    frontend_flags = Any(app_flags)
    frontend_aliases = Any(app_aliases)

    # create requested profiles by default, if they don't exist:
    auto_create = CBool(True)
    # connection info:

    sshserver = Unicode('', config=True,
        help="""The SSH server to use to connect to the kernel.""")
    sshkey = Unicode('', config=True,
        help="""Path to the ssh key to use for logging in to the ssh server.""")

    hb_port = Int(0, config=True,
        help="set the heartbeat port [default: random]")
    shell_port = Int(0, config=True,
        help="set the shell (ROUTER) port [default: random]")
    iopub_port = Int(0, config=True,
        help="set the iopub (PUB) port [default: random]")
    stdin_port = Int(0, config=True,
        help="set the stdin (DEALER) port [default: random]")
    connection_file = Unicode('', config=True,
        help="""JSON file in which to store connection info [default: kernel-<pid>.json]

        This file will contain the IP, ports, and authentication key needed to connect
        clients to this kernel. By default, this file will be created in the security-dir
        of the current profile, but can be specified by absolute path.
        """)
    def _connection_file_default(self):
        return 'kernel-%i.json' % os.getpid()

    existing = CUnicode('', config=True,
        help="""Connect to an already running kernel""")

    confirm_exit = CBool(True, config=True,
        help="""
        Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
        to force a direct exit without any confirmation.""",
    )


    def build_kernel_argv(self, argv=None):
        """build argv to be passed to kernel subprocess"""
        if argv is None:
            argv = sys.argv[1:]
        self.kernel_argv = swallow_argv(argv, self.frontend_aliases, self.frontend_flags)
        # kernel should inherit default config file from frontend
        self.kernel_argv.append("--IPKernelApp.parent_appname='%s'" % self.name)

    def init_connection_file(self):
        """find the connection file, and load the info if found.

        The current working directory and the current profile's security
        directory will be searched for the file if it is not given by
        absolute path.

        When attempting to connect to an existing kernel and the `--existing`
        argument does not match an existing file, it will be interpreted as a
        fileglob, and the matching file in the current profile's security dir
        with the latest access time will be used.

        After this method is called, self.connection_file contains the *full path*
        to the connection file, never just its name.
        """
        if self.existing:
            try:
                cf = find_connection_file(self.existing)
            except Exception:
                self.log.critical("Could not find existing kernel connection file %s", self.existing)
                self.exit(1)
            self.log.info("Connecting to existing kernel: %s" % cf)
            self.connection_file = cf
        else:
            # not existing, check if we are going to write the file
            # and ensure that self.connection_file is a full path, not just the shortname
            try:
                cf = find_connection_file(self.connection_file)
            except Exception:
                # file might not exist
                if self.connection_file == os.path.basename(self.connection_file):
                    # just shortname, put it in security dir
                    cf = os.path.join(self.profile_dir.security_dir, self.connection_file)
                else:
                    cf = self.connection_file
                self.connection_file = cf

        # should load_connection_file only be used for existing?
        # as it is now, this allows reusing ports if an existing
        # file is requested
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
            self.exit(1)

    def load_connection_file(self):
        """load ip/port/hmac config from JSON connection file"""
        # this is identical to IPKernelApp.load_connection_file
        # perhaps it can be centralized somewhere?
        try:
            fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
        except IOError:
            self.log.debug("Connection File not found: %s", self.connection_file)
            return
        self.log.debug(u"Loading connection file %s", fname)
        with open(fname) as f:
            cfg = json.load(f)
        self.transport = cfg.get('transport', 'tcp')
-        self.ip = cfg.get('ip', LOCALHOST)
+        self.ip = cfg.get('ip', localhost())

        for channel in ('hb', 'shell', 'iopub', 'stdin', 'control'):
            name = channel + '_port'
            if getattr(self, name) == 0 and name in cfg:
                # not overridden by config or cl_args
                setattr(self, name, cfg[name])
        if 'key' in cfg:
            self.config.Session.key = str_to_bytes(cfg['key'])
        if 'signature_scheme' in cfg:
            self.config.Session.signature_scheme = cfg['signature_scheme']

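    # For reference, a connection file of the kind load_connection_file parses
    # looks roughly like this (all values illustrative; the keys match exactly
    # what the method reads above):
    #
    #     {
    #       "transport": "tcp",
    #       "ip": "127.0.0.1",
    #       "shell_port": 53794,
    #       "iopub_port": 53795,
    #       "stdin_port": 53796,
    #       "hb_port": 53797,
    #       "control_port": 53798,
    #       "key": "c6c7a3f5-...",
    #       "signature_scheme": "hmac-sha256"
    #     }
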
    def init_ssh(self):
        """set up ssh tunnels, if needed."""
        if not self.existing or (not self.sshserver and not self.sshkey):
            return
        self.load_connection_file()

        transport = self.transport
        ip = self.ip

        if transport != 'tcp':
            self.log.error("Can only use ssh tunnels with TCP sockets, not %s", transport)
            sys.exit(-1)

        if self.sshkey and not self.sshserver:
            # specifying just the key implies that we are connecting directly
            self.sshserver = ip
-            ip = LOCALHOST
+            ip = localhost()

        # build connection dict for tunnels:
        info = dict(ip=ip,
                    shell_port=self.shell_port,
                    iopub_port=self.iopub_port,
                    stdin_port=self.stdin_port,
                    hb_port=self.hb_port
        )

        self.log.info("Forwarding connections to %s via %s"%(ip, self.sshserver))

        # tunnels return a new set of ports, which will be on localhost:
-        self.ip = LOCALHOST
+        self.ip = localhost()
        try:
            newports = tunnel_to_kernel(info, self.sshserver, self.sshkey)
        except:
            # even catch KeyboardInterrupt
            self.log.error("Could not setup tunnels", exc_info=True)
            self.exit(1)

        self.shell_port, self.iopub_port, self.stdin_port, self.hb_port = newports

        cf = self.connection_file
        base,ext = os.path.splitext(cf)
        base = os.path.basename(base)
        self.connection_file = os.path.basename(base)+'-ssh'+ext
        self.log.critical("To connect another client via this tunnel, use:")
        self.log.critical("--existing %s" % self.connection_file)

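    # Example usage (hypothetical host and file names): connecting a frontend
    # to a remote kernel through init_ssh, using the --existing and --ssh
    # options wired up by app_flags/app_aliases above, looks like
    #
    #     ipython qtconsole --existing kernel-12345.json --ssh=user@remote-host
    #
    # after which the rewritten file (kernel-12345-ssh.json here) lets further
    # clients share the tunnels, as the log messages above advertise.
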
    def _new_connection_file(self):
        cf = ''
        while not cf:
            # we don't need a 128b id to distinguish kernels, use more readable
            # 48b node segment (12 hex chars). Users running more than 32k simultaneous
            # kernels can subclass.
            ident = str(uuid.uuid4()).split('-')[-1]
            cf = os.path.join(self.profile_dir.security_dir, 'kernel-%s.json' % ident)
            # only keep if it's actually new. Protect against unlikely collision
            # in 48b random search space
            cf = cf if not os.path.exists(cf) else ''
        return cf

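    # The ident above is just the last dash-separated field of a uuid4, e.g.
    #
    #     >>> str(uuid.uuid4())
    #     'f6c1a2d0-3b7e-4a9a-9e2b-8c1d2e3f4a5b'   # illustrative value
    #     >>> str(uuid.uuid4()).split('-')[-1]     # 12 hex chars = 48 random bits
    #     '9e2b8c1d2e3f'                           # illustrative value
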
    def init_kernel_manager(self):
        # Don't let Qt or ZMQ swallow KeyboardInterrupts.
        if self.existing:
            self.kernel_manager = None
            return
        signal.signal(signal.SIGINT, signal.SIG_DFL)

        # Create a KernelManager and start a kernel.
        self.kernel_manager = self.kernel_manager_class(
                                ip=self.ip,
                                transport=self.transport,
                                shell_port=self.shell_port,
                                iopub_port=self.iopub_port,
                                stdin_port=self.stdin_port,
                                hb_port=self.hb_port,
                                connection_file=self.connection_file,
                                parent=self,
        )
        self.kernel_manager.client_factory = self.kernel_client_class
        self.kernel_manager.start_kernel(extra_arguments=self.kernel_argv)
        atexit.register(self.kernel_manager.cleanup_ipc_files)

        if self.sshserver:
            # ssh, write new connection file
            self.kernel_manager.write_connection_file()

        # in case KM defaults / ssh writing changes things:
        km = self.kernel_manager
        self.shell_port=km.shell_port
        self.iopub_port=km.iopub_port
        self.stdin_port=km.stdin_port
        self.hb_port=km.hb_port
        self.connection_file = km.connection_file

        atexit.register(self.kernel_manager.cleanup_connection_file)

    def init_kernel_client(self):
        if self.kernel_manager is not None:
            self.kernel_client = self.kernel_manager.client()
        else:
            self.kernel_client = self.kernel_client_class(
                                ip=self.ip,
                                transport=self.transport,
                                shell_port=self.shell_port,
                                iopub_port=self.iopub_port,
                                stdin_port=self.stdin_port,
                                hb_port=self.hb_port,
                                connection_file=self.connection_file,
                                parent=self,
            )

        self.kernel_client.start_channels()



    def initialize(self, argv=None):
        """
        Classes which mix this class in should call:
            IPythonConsoleApp.initialize(self,argv)
        """
        self.init_connection_file()
        default_secure(self.config)
        self.init_ssh()
        self.init_kernel_manager()
        self.init_kernel_client()

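As the initialize docstring above prescribes, a concrete frontend mixes
IPythonConsoleApp into an IPython application and chains the two initialize
calls. A skeletal sketch (the class name is illustrative, not from this diff,
and it assumes BaseIPythonApplication from IPython.core.application, which the
notebook app in the next hunk also uses):

    class MyConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
        name = 'my-console'

        def initialize(self, argv=None):
            super(MyConsoleApp, self).initialize(argv)
            self.build_kernel_argv(argv)
            IPythonConsoleApp.initialize(self, argv)

This is essentially the shape of the qtconsole and terminal console apps that
consume the mixin.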
@@ -1,726 +1,728 @@
# coding: utf-8
"""A tornado based IPython notebook server.

Authors:

* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# stdlib
import errno
import logging
import os
import random
import select
import signal
import socket
import sys
import threading
import time
import webbrowser


# Third party
# check for pyzmq 2.1.11
from IPython.utils.zmqrelated import check_for_zmq
check_for_zmq('2.1.11', 'IPython.html')

from jinja2 import Environment, FileSystemLoader

# Install the pyzmq ioloop. This has to be done before anything else from
# tornado is imported.
from zmq.eventloop import ioloop
ioloop.install()

# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
    import tornado
except ImportError:
    raise ImportError(msg)
try:
    version_info = tornado.version_info
except AttributeError:
    raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (2,1,0):
    raise ImportError(msg + ", but you have %s" % tornado.version)

from tornado import httpserver
from tornado import web

# Our own libraries
from IPython.html import DEFAULT_STATIC_FILES_PATH

from .services.kernels.kernelmanager import MappingKernelManager
from .services.notebooks.nbmanager import NotebookManager
from .services.notebooks.filenbmanager import FileNotebookManager
from .services.clusters.clustermanager import ClusterManager

from .base.handlers import AuthenticatedFileHandler, FileFindHandler

from IPython.config.application import catch_config_error, boolean_flag
from IPython.core.application import BaseIPythonApplication
from IPython.consoleapp import IPythonConsoleApp
from IPython.kernel import swallow_argv
from IPython.kernel.zmq.session import default_secure
from IPython.kernel.zmq.kernelapp import (
    kernel_flags,
    kernel_aliases,
)
from IPython.utils.importstring import import_item
-from IPython.utils.localinterfaces import LOCALHOST
+from IPython.utils.localinterfaces import localhost
from IPython.utils import submodule
from IPython.utils.traitlets import (
    Dict, Unicode, Integer, List, Bool, Bytes,
    DottedObjectName
)
from IPython.utils import py3compat
from IPython.utils.path import filefind

from .utils import url_path_join

#-----------------------------------------------------------------------------
# Module globals
#-----------------------------------------------------------------------------

_examples = """
ipython notebook                       # start the notebook
ipython notebook --profile=sympy       # use the sympy profile
ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
"""

#-----------------------------------------------------------------------------
# Helper functions
#-----------------------------------------------------------------------------

def random_ports(port, n):
    """Generate a list of n random ports near the given port.

    The first 5 ports will be sequential, and the remaining n-5 will be
    randomly selected in the range [port-2*n, port+2*n].
    """
    for i in range(min(5, n)):
        yield port + i
    for i in range(n-5):
        yield port + random.randint(-2*n, 2*n)

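# The generator above pairs with the port_retries trait defined further down:
# the server tries the configured port first, then nearby fallbacks. A sketch
# of that consumption pattern (try_bind is a stand-in helper, not part of this
# module, and the attempt count is an assumption about how the listen loop
# consumes the generator):
#
#     def try_bind(port):
#         sock = socket.socket()
#         try:
#             sock.bind(('127.0.0.1', port))
#         except socket.error:
#             return False
#         finally:
#             sock.close()
#         return True
#
#     for port in random_ports(8888, 51):  # 1 + port_retries attempts
#         if try_bind(port):
#             break
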
def load_handlers(name):
    """Load the (URL pattern, handler) tuples for each component."""
    name = 'IPython.html.' + name
    mod = __import__(name, fromlist=['default_handlers'])
    return mod.default_handlers

#-----------------------------------------------------------------------------
# The Tornado web application
#-----------------------------------------------------------------------------

class NotebookWebApplication(web.Application):

    def __init__(self, ipython_app, kernel_manager, notebook_manager,
                 cluster_manager, log,
                 base_project_url, settings_overrides):

        settings = self.init_settings(
            ipython_app, kernel_manager, notebook_manager, cluster_manager,
            log, base_project_url, settings_overrides)
        handlers = self.init_handlers(settings)

        super(NotebookWebApplication, self).__init__(handlers, **settings)

    def init_settings(self, ipython_app, kernel_manager, notebook_manager,
                      cluster_manager, log,
                      base_project_url, settings_overrides):
        # Python < 2.6.5 doesn't accept unicode keys in f(**kwargs), and
        # base_project_url will always be unicode, which will in turn
        # make the patterns unicode, and ultimately result in unicode
        # keys in kwargs to handler._execute(**kwargs) in tornado.
        # This enforces that base_project_url be ascii in that situation.
        #
        # Note that the URLs these patterns check against are escaped,
        # and thus guaranteed to be ASCII: 'héllo' is really 'h%C3%A9llo'.
        base_project_url = py3compat.unicode_to_str(base_project_url, 'ascii')
        template_path = settings_overrides.get("template_path", os.path.join(os.path.dirname(__file__), "templates"))
        settings = dict(
            # basics
            base_project_url=base_project_url,
            base_kernel_url=ipython_app.base_kernel_url,
            template_path=template_path,
            static_path=ipython_app.static_file_path,
            static_handler_class = FileFindHandler,
            static_url_prefix = url_path_join(base_project_url,'/static/'),

            # authentication
            cookie_secret=ipython_app.cookie_secret,
            login_url=url_path_join(base_project_url,'/login'),
            password=ipython_app.password,

            # managers
            kernel_manager=kernel_manager,
            notebook_manager=notebook_manager,
            cluster_manager=cluster_manager,

            # IPython stuff
            mathjax_url=ipython_app.mathjax_url,
            config=ipython_app.config,
            use_less=ipython_app.use_less,
            jinja2_env=Environment(loader=FileSystemLoader(template_path)),
        )

        # allow custom overrides for the tornado web app.
        settings.update(settings_overrides)
        return settings

    def init_handlers(self, settings):
        # Load the (URL pattern, handler) tuples for each component.
        handlers = []
        handlers.extend(load_handlers('base.handlers'))
        handlers.extend(load_handlers('tree.handlers'))
        handlers.extend(load_handlers('auth.login'))
        handlers.extend(load_handlers('auth.logout'))
        handlers.extend(load_handlers('notebook.handlers'))
        handlers.extend(load_handlers('services.kernels.handlers'))
        handlers.extend(load_handlers('services.notebooks.handlers'))
        handlers.extend(load_handlers('services.clusters.handlers'))
        handlers.extend([
            (r"/files/(.*)", AuthenticatedFileHandler, {'path' : settings['notebook_manager'].notebook_dir}),
        ])
        # prepend base_project_url onto the patterns that we match
        new_handlers = []
        for handler in handlers:
            pattern = url_path_join(settings['base_project_url'], handler[0])
            new_handler = tuple([pattern] + list(handler[1:]))
            new_handlers.append(new_handler)
        return new_handlers



#-----------------------------------------------------------------------------
# Aliases and Flags
#-----------------------------------------------------------------------------

flags = dict(kernel_flags)
flags['no-browser']=(
    {'NotebookApp' : {'open_browser' : False}},
    "Don't open the notebook in a browser after startup."
)
flags['no-mathjax']=(
    {'NotebookApp' : {'enable_mathjax' : False}},
    """Disable MathJax

    MathJax is the javascript library IPython uses to render math/LaTeX. It is
    very large, so you may want to disable it if you have a slow internet
    connection, or for offline use of the notebook.

    When disabled, equations etc. will appear as their untransformed TeX source.
    """
)

# Add notebook manager flags
flags.update(boolean_flag('script', 'FileNotebookManager.save_script',
               'Auto-save a .py script every time the .ipynb notebook is saved',
               'Do not auto-save .py scripts for every notebook'))

# the flags that are specific to the frontend
# these must be scrubbed before being passed to the kernel,
# or it will raise an error on unrecognized flags
notebook_flags = ['no-browser', 'no-mathjax', 'script', 'no-script']

aliases = dict(kernel_aliases)

aliases.update({
    'ip': 'NotebookApp.ip',
    'port': 'NotebookApp.port',
    'port-retries': 'NotebookApp.port_retries',
    'transport': 'KernelManager.transport',
    'keyfile': 'NotebookApp.keyfile',
    'certfile': 'NotebookApp.certfile',
    'notebook-dir': 'NotebookManager.notebook_dir',
    'browser': 'NotebookApp.browser',
})

# remove ipkernel flags that are singletons, and don't make sense in
# multi-kernel environment:
aliases.pop('f', None)

notebook_aliases = [u'port', u'port-retries', u'ip', u'keyfile', u'certfile',
                    u'notebook-dir', u'profile', u'profile-dir']

#-----------------------------------------------------------------------------
# NotebookApp
#-----------------------------------------------------------------------------

class NotebookApp(BaseIPythonApplication):

    name = 'ipython-notebook'

    description = """
        The IPython HTML Notebook.

        This launches a Tornado based HTML Notebook Server that serves up an
        HTML5/Javascript Notebook client.
    """
    examples = _examples

    classes = IPythonConsoleApp.classes + [MappingKernelManager, NotebookManager,
        FileNotebookManager]
    flags = Dict(flags)
    aliases = Dict(aliases)

    kernel_argv = List(Unicode)

    def _log_level_default(self):
        return logging.INFO

    def _log_format_default(self):
        """override default log format to include time"""
        return u"%(asctime)s.%(msecs).03d [%(name)s]%(highlevel)s %(message)s"

    # create requested profiles by default, if they don't exist:
    auto_create = Bool(True)

    # file to be opened in the notebook server
    file_to_run = Unicode('')

    # Network related information.

-    ip = Unicode(LOCALHOST, config=True,
+    ip = Unicode(config=True,
        help="The IP address the notebook server will listen on."
    )
+    def _ip_default(self):
+        return localhost()
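    # The hunk above is the same import-time fix expressed through traitlets:
    # a default passed to Unicode(...) in the class body is evaluated when the
    # module is imported, while a _ip_default method runs only when the trait
    # is first read and nothing was configured. Schematically (toy code, not
    # from this diff; expensive_lookup is a hypothetical placeholder):
    #
    #     class Static(HasTraits):
    #         ip = Unicode(expensive_lookup())   # runs at import time
    #
    #     class Lazy(HasTraits):
    #         ip = Unicode(config=True)
    #         def _ip_default(self):
    #             return expensive_lookup()      # runs on first access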
299
301
300 def _ip_changed(self, name, old, new):
302 def _ip_changed(self, name, old, new):
301 if new == u'*': self.ip = u''
303 if new == u'*': self.ip = u''
302
304
303 port = Integer(8888, config=True,
305 port = Integer(8888, config=True,
304 help="The port the notebook server will listen on."
306 help="The port the notebook server will listen on."
305 )
307 )
306 port_retries = Integer(50, config=True,
308 port_retries = Integer(50, config=True,
307 help="The number of additional ports to try if the specified port is not available."
309 help="The number of additional ports to try if the specified port is not available."
308 )
310 )
309
311
310 certfile = Unicode(u'', config=True,
312 certfile = Unicode(u'', config=True,
311 help="""The full path to an SSL/TLS certificate file."""
313 help="""The full path to an SSL/TLS certificate file."""
312 )
314 )
313
315
314 keyfile = Unicode(u'', config=True,
316 keyfile = Unicode(u'', config=True,
315 help="""The full path to a private key file for usage with SSL/TLS."""
317 help="""The full path to a private key file for usage with SSL/TLS."""
316 )
318 )
317
319
318 cookie_secret = Bytes(b'', config=True,
320 cookie_secret = Bytes(b'', config=True,
319 help="""The random bytes used to secure cookies.
321 help="""The random bytes used to secure cookies.
320 By default this is a new random number every time you start the Notebook.
322 By default this is a new random number every time you start the Notebook.
321 Set it to a value in a config file to enable logins to persist across server sessions.
323 Set it to a value in a config file to enable logins to persist across server sessions.
322
324
323 Note: Cookie secrets should be kept private, do not share config files with
325 Note: Cookie secrets should be kept private, do not share config files with
324 cookie_secret stored in plaintext (you can read the value from a file).
326 cookie_secret stored in plaintext (you can read the value from a file).
325 """
327 """
326 )
328 )
327 def _cookie_secret_default(self):
329 def _cookie_secret_default(self):
328 return os.urandom(1024)
330 return os.urandom(1024)
329
331
330 password = Unicode(u'', config=True,
332 password = Unicode(u'', config=True,
331 help="""Hashed password to use for web authentication.
333 help="""Hashed password to use for web authentication.
332
334
333 To generate, type in a python/IPython shell:
335 To generate, type in a python/IPython shell:
334
336
335 from IPython.lib import passwd; passwd()
337 from IPython.lib import passwd; passwd()
336
338
337 The string should be of the form type:salt:hashed-password.
339 The string should be of the form type:salt:hashed-password.
338 """
340 """
339 )
341 )
340
342
341 open_browser = Bool(True, config=True,
343 open_browser = Bool(True, config=True,
342 help="""Whether to open in a browser after starting.
344 help="""Whether to open in a browser after starting.
343 The specific browser used is platform dependent and
345 The specific browser used is platform dependent and
344 determined by the python standard library `webbrowser`
346 determined by the python standard library `webbrowser`
345 module, unless it is overridden using the --browser
347 module, unless it is overridden using the --browser
346 (NotebookApp.browser) configuration option.
348 (NotebookApp.browser) configuration option.
347 """)
349 """)
348
350
349 browser = Unicode(u'', config=True,
351 browser = Unicode(u'', config=True,
350 help="""Specify what command to use to invoke a web
352 help="""Specify what command to use to invoke a web
351 browser when opening the notebook. If not specified, the
353 browser when opening the notebook. If not specified, the
352 default browser will be determined by the `webbrowser`
354 default browser will be determined by the `webbrowser`
353 standard library module, which allows setting of the
355 standard library module, which allows setting of the
354 BROWSER environment variable to override it.
356 BROWSER environment variable to override it.
355 """)
357 """)
356
358
357 use_less = Bool(False, config=True,
359 use_less = Bool(False, config=True,
358 help="""Wether to use Browser Side less-css parsing
360 help="""Wether to use Browser Side less-css parsing
359 instead of compiled css version in templates that allows
361 instead of compiled css version in templates that allows
360 it. This is mainly convenient when working on the less
362 it. This is mainly convenient when working on the less
361 file to avoid a build step, or if user want to overwrite
363 file to avoid a build step, or if user want to overwrite
362 some of the less variables without having to recompile
364 some of the less variables without having to recompile
363 everything.
365 everything.
364
366
365 You will need to install the less.js component in the static directory
367 You will need to install the less.js component in the static directory
366 either in the source tree or in your profile folder.
368 either in the source tree or in your profile folder.
367 """)
369 """)
368
370
369 webapp_settings = Dict(config=True,
371 webapp_settings = Dict(config=True,
370 help="Supply overrides for the tornado.web.Application that the "
372 help="Supply overrides for the tornado.web.Application that the "
371 "IPython notebook uses.")
373 "IPython notebook uses.")
372
374
373 enable_mathjax = Bool(True, config=True,
375 enable_mathjax = Bool(True, config=True,
374 help="""Whether to enable MathJax for typesetting math/TeX
376 help="""Whether to enable MathJax for typesetting math/TeX
375
377
376 MathJax is the javascript library IPython uses to render math/LaTeX. It is
378 MathJax is the javascript library IPython uses to render math/LaTeX. It is
377 very large, so you may want to disable it if you have a slow internet
379 very large, so you may want to disable it if you have a slow internet
378 connection, or for offline use of the notebook.
380 connection, or for offline use of the notebook.
379
381
380 When disabled, equations etc. will appear as their untransformed TeX source.
382 When disabled, equations etc. will appear as their untransformed TeX source.
381 """
383 """
382 )
384 )
383 def _enable_mathjax_changed(self, name, old, new):
385 def _enable_mathjax_changed(self, name, old, new):
384 """set mathjax url to empty if mathjax is disabled"""
386 """set mathjax url to empty if mathjax is disabled"""
385 if not new:
387 if not new:
386 self.mathjax_url = u''
388 self.mathjax_url = u''
387
389
388 base_project_url = Unicode('/', config=True,
390 base_project_url = Unicode('/', config=True,
389 help='''The base URL for the notebook server.
391 help='''The base URL for the notebook server.
390
392
391 Leading and trailing slashes can be omitted,
393 Leading and trailing slashes can be omitted,
392 and will automatically be added.
394 and will automatically be added.
393 ''')
395 ''')
394 def _base_project_url_changed(self, name, old, new):
396 def _base_project_url_changed(self, name, old, new):
395 if not new.startswith('/'):
397 if not new.startswith('/'):
396 self.base_project_url = '/'+new
398 self.base_project_url = '/'+new
397 elif not new.endswith('/'):
399 elif not new.endswith('/'):
398 self.base_project_url = new+'/'
400 self.base_project_url = new+'/'

    base_kernel_url = Unicode('/', config=True,
        help='''The base URL for the kernel server

        Leading and trailing slashes can be omitted,
        and will automatically be added.
        ''')
    def _base_kernel_url_changed(self, name, old, new):
        if not new.startswith('/'):
            self.base_kernel_url = '/'+new
        elif not new.endswith('/'):
            self.base_kernel_url = new+'/'

    websocket_url = Unicode("", config=True,
        help="""The base URL for the websocket server,
        if it differs from the HTTP server (hint: it almost certainly doesn't).

        Should be in the form of an HTTP origin: ws[s]://hostname[:port]
        """
    )

    extra_static_paths = List(Unicode, config=True,
        help="""Extra paths to search for serving static files.

        This allows adding javascript/css to be available from the notebook server machine,
        or overriding individual files in the IPython default static files."""
    )
    def _extra_static_paths_default(self):
        return [os.path.join(self.profile_dir.location, 'static')]

    @property
    def static_file_path(self):
        """return extra paths + the default location"""
        return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]

    mathjax_url = Unicode("", config=True,
        help="""The url for MathJax.js."""
    )
    def _mathjax_url_default(self):
        if not self.enable_mathjax:
            return u''
        static_url_prefix = self.webapp_settings.get("static_url_prefix",
            url_path_join(self.base_project_url, "static")
        )
        try:
            mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), self.static_file_path)
        except IOError:
            if self.certfile:
                # HTTPS: load from Rackspace CDN, because SSL certificate requires it
                base = u"https://c328740.ssl.cf1.rackcdn.com"
            else:
                base = u"http://cdn.mathjax.org"

            url = base + u"/mathjax/latest/MathJax.js"
            self.log.info("Using MathJax from CDN: %s", url)
            return url
        else:
            self.log.info("Using local MathJax from %s" % mathjax)
            return url_path_join(static_url_prefix, u"mathjax/MathJax.js")

    def _mathjax_url_changed(self, name, old, new):
        if new and not self.enable_mathjax:
            # enable_mathjax=False overrides mathjax_url
            self.mathjax_url = u''
        else:
            self.log.info("Using MathJax: %s", new)

    notebook_manager_class = DottedObjectName('IPython.html.services.notebooks.filenbmanager.FileNotebookManager',
        config=True,
        help='The notebook manager class to use.')

    trust_xheaders = Bool(False, config=True,
        help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers "
              "sent by the upstream reverse proxy. Necessary if the proxy handles SSL.")
    )

    def parse_command_line(self, argv=None):
        super(NotebookApp, self).parse_command_line(argv)

        if self.extra_args:
            f = os.path.abspath(self.extra_args[0])
            if os.path.isdir(f):
                nbdir = f
            else:
                self.file_to_run = f
                nbdir = os.path.dirname(f)
            self.config.NotebookManager.notebook_dir = nbdir
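        # (Editor's note) in practice: `ipython notebook ~/work` serves the
        # notebooks in ~/work, while `ipython notebook foo.ipynb` serves the
        # containing directory and records foo.ipynb as the file to open.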

    def init_kernel_argv(self):
        """construct the kernel arguments"""
        # Scrub frontend-specific flags
        self.kernel_argv = swallow_argv(self.argv, notebook_aliases, notebook_flags)
        # Kernel should inherit default config file from frontend
        self.kernel_argv.append("--IPKernelApp.parent_appname='%s'" % self.name)
        # Kernel should get *absolute* path to profile directory
        self.kernel_argv.extend(["--profile-dir", self.profile_dir.location])

    def init_configurables(self):
        # force Session default to be secure
        default_secure(self.config)
        self.kernel_manager = MappingKernelManager(
            parent=self, log=self.log, kernel_argv=self.kernel_argv,
            connection_dir = self.profile_dir.security_dir,
        )
        kls = import_item(self.notebook_manager_class)
        self.notebook_manager = kls(parent=self, log=self.log)
        self.notebook_manager.load_notebook_names()
        self.cluster_manager = ClusterManager(parent=self, log=self.log)
        self.cluster_manager.update_profiles()

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger
        # that self.log is a child of. The logging module dispatches log messages
        # to a logger and all of its ancestors until propagate is set to False.
        self.log.propagate = False

        # hook up tornado 3's loggers to our app handlers
        for name in ('access', 'application', 'general'):
            logging.getLogger('tornado.%s' % name).handlers = self.log.handlers

    def init_webapp(self):
        """initialize tornado webapp and httpserver"""
        self.web_app = NotebookWebApplication(
            self, self.kernel_manager, self.notebook_manager,
            self.cluster_manager, self.log,
            self.base_project_url, self.webapp_settings
        )
        if self.certfile:
            ssl_options = dict(certfile=self.certfile)
            if self.keyfile:
                ssl_options['keyfile'] = self.keyfile
        else:
            ssl_options = None
        self.web_app.password = self.password
        self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options,
                                                 xheaders=self.trust_xheaders)
        if not self.ip:
            warning = "WARNING: The notebook server is listening on all IP addresses"
            if ssl_options is None:
                self.log.critical(warning + " and not using encryption. This "
                                  "is not recommended.")
            if not self.password:
                self.log.critical(warning + " and not using authentication. "
                                  "This is highly insecure and not recommended.")
        success = None
        for port in random_ports(self.port, self.port_retries+1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                # XXX: remove the e.errno == -9 block when we require
                # tornado >= 3.0
                if e.errno == -9 and tornado.version_info[0] < 3:
                    # The flags passed to socket.getaddrinfo from
                    # tornado.netutils.bind_sockets can cause "gaierror:
                    # [Errno -9] Address family for hostname not supported"
                    # when the interface is not associated, for example.
                    # Changing the flags to exclude socket.AI_ADDRCONFIG does
                    # not cause this error, but the only way to do this is to
                    # monkeypatch socket to remove the AI_ADDRCONFIG attribute
                    saved_AI_ADDRCONFIG = socket.AI_ADDRCONFIG
                    self.log.warn('Monkeypatching socket to fix tornado bug')
                    del(socket.AI_ADDRCONFIG)
                    try:
                        # retry the tornado call without AI_ADDRCONFIG flags
                        self.http_server.listen(port, self.ip)
                    except socket.error as e2:
                        e = e2
                    else:
                        self.port = port
                        success = True
                        break
                    # restore the monkeypatch
                    socket.AI_ADDRCONFIG = saved_AI_ADDRCONFIG
                if e.errno != errno.EADDRINUSE:
                    raise
                self.log.info('The port %i is already in use, trying another random port.' % port)
            else:
                self.port = port
                success = True
                break
        if not success:
            self.log.critical('ERROR: the notebook server could not be started because '
                              'no available port could be found.')
            self.exit(1)
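        # (Editor's note) random_ports -- a helper defined earlier in this
        # module -- yields the configured port first and then nearby/random
        # candidates, so an EADDRINUSE on the default port is retried up to
        # self.port_retries times before the server gives up.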

    def init_signal(self):
        if not sys.platform.startswith('win'):
            signal.signal(signal.SIGINT, self._handle_sigint)
        signal.signal(signal.SIGTERM, self._signal_stop)
        if hasattr(signal, 'SIGUSR1'):
            # Windows doesn't support SIGUSR1
            signal.signal(signal.SIGUSR1, self._signal_info)
        if hasattr(signal, 'SIGINFO'):
            # only on BSD-based systems
            signal.signal(signal.SIGINFO, self._signal_info)

    def _handle_sigint(self, sig, frame):
        """SIGINT handler spawns confirmation dialog"""
        # register more forceful signal handler for ^C^C case
        signal.signal(signal.SIGINT, self._signal_stop)
        # request confirmation dialog in bg thread, to avoid
        # blocking the App
        thread = threading.Thread(target=self._confirm_exit)
        thread.daemon = True
        thread.start()

    def _restore_sigint_handler(self):
        """callback for restoring original SIGINT handler"""
        signal.signal(signal.SIGINT, self._handle_sigint)

    def _confirm_exit(self):
        """confirm shutdown on ^C

        A second ^C, or answering 'y' within 5s will cause shutdown,
        otherwise original SIGINT handler will be restored.

        This doesn't work on Windows.
        """
        # FIXME: remove this delay when pyzmq dependency is >= 2.1.11
        time.sleep(0.1)
        info = self.log.info
        info('interrupted')
        print self.notebook_info()
        sys.stdout.write("Shutdown this notebook server (y/[n])? ")
        sys.stdout.flush()
        r,w,x = select.select([sys.stdin], [], [], 5)
        if r:
            line = sys.stdin.readline()
            if line.lower().startswith('y'):
                self.log.critical("Shutdown confirmed")
                ioloop.IOLoop.instance().stop()
                return
        else:
            print "No answer for 5s:",
            print "resuming operation..."
        # no answer, or answer is no:
        # set it back to original SIGINT handler
        # use IOLoop.add_callback because signal.signal must be called
        # from main thread
        ioloop.IOLoop.instance().add_callback(self._restore_sigint_handler)

    def _signal_stop(self, sig, frame):
        self.log.critical("received signal %s, stopping", sig)
        ioloop.IOLoop.instance().stop()

    def _signal_info(self, sig, frame):
        print self.notebook_info()

    def init_components(self):
        """Check the components submodule, and warn if it's unclean"""
        status = submodule.check_submodule_status()
        if status == 'missing':
            self.log.warn("components submodule missing, running `git submodule update`")
            submodule.update_submodules(submodule.ipython_parent())
        elif status == 'unclean':
            self.log.warn("components submodule unclean, you may see 404s on static/components")
            self.log.warn("run `setup.py submodule` or `git submodule update` to update")


    @catch_config_error
    def initialize(self, argv=None):
        self.init_logging()
        super(NotebookApp, self).initialize(argv)
        self.init_kernel_argv()
        self.init_configurables()
        self.init_components()
        self.init_webapp()
        self.init_signal()

    def cleanup_kernels(self):
        """Shutdown all kernels.

        The kernels will shut down by themselves when this process no longer exists,
        but explicit shutdown allows the KernelManagers to clean up the connection files.
        """
        self.log.info('Shutting down kernels')
        self.kernel_manager.shutdown_all()

    def notebook_info(self):
        "Return the current working directory and the server url information"
        mgr_info = self.notebook_manager.info_string() + "\n"
        return mgr_info + "The IPython Notebook is running at: %s" % self._url

    def start(self):
        """ Start the IPython Notebook server app, after initialization

        This method takes no arguments so all configuration and initialization
        must be done prior to calling this method."""
        ip = self.ip if self.ip else '[all ip addresses on your system]'
        proto = 'https' if self.certfile else 'http'
        info = self.log.info
        self._url = "%s://%s:%i%s" % (proto, ip, self.port,
                                      self.base_project_url)
        for line in self.notebook_info().split("\n"):
            info(line)
        info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).")

        if self.open_browser or self.file_to_run:
            ip = self.ip or localhost()
            try:
                browser = webbrowser.get(self.browser or None)
            except webbrowser.Error as e:
                self.log.warn('No web browser found: %s.' % e)
                browser = None

            if self.file_to_run:
                name, _ = os.path.splitext(os.path.basename(self.file_to_run))
                url = self.notebook_manager.rev_mapping.get(name, '')
            else:
                url = ''
            if browser:
                b = lambda : browser.open("%s://%s:%i%s%s" % (proto, ip,
                    self.port, self.base_project_url, url), new=2)
                threading.Thread(target=b).start()
        try:
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            info("Interrupted...")
        finally:
            self.cleanup_kernels()


#-----------------------------------------------------------------------------
# Main entry point
#-----------------------------------------------------------------------------

launch_new_instance = NotebookApp.launch_instance
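# (Editor's illustration, not part of this changeset) the entry point above is
# what the `ipython notebook` command ultimately calls; an equivalent
# programmatic launch, assuming IPython 1.x module paths, would be:
#
#     from IPython.html.notebookapp import NotebookApp
#     NotebookApp.launch_instance(argv=['--no-browser', '--port=8889'])
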
@@ -1,556 +1,558 @@
1 """Utilities for connecting to kernels
1 """Utilities for connecting to kernels
2
2
3 Authors:
3 Authors:
4
4
5 * Min Ragan-Kelley
5 * Min Ragan-Kelley
6
6
7 """
7 """
8
8
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10 # Copyright (C) 2013 The IPython Development Team
10 # Copyright (C) 2013 The IPython Development Team
11 #
11 #
12 # Distributed under the terms of the BSD License. The full license is in
12 # Distributed under the terms of the BSD License. The full license is in
13 # the file COPYING, distributed as part of this software.
13 # the file COPYING, distributed as part of this software.
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15
15
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17 # Imports
17 # Imports
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19
19
20 from __future__ import absolute_import
20 from __future__ import absolute_import
21
21
22 import glob
22 import glob
23 import json
23 import json
24 import os
24 import os
25 import socket
25 import socket
26 import sys
26 import sys
27 from getpass import getpass
27 from getpass import getpass
28 from subprocess import Popen, PIPE
28 from subprocess import Popen, PIPE
29 import tempfile
29 import tempfile
30
30
31 import zmq
31 import zmq
32
32
33 # external imports
33 # external imports
34 from IPython.external.ssh import tunnel
34 from IPython.external.ssh import tunnel
35
35
36 # IPython imports
36 # IPython imports
37 from IPython.config import Configurable
37 from IPython.config import Configurable
38 from IPython.core.profiledir import ProfileDir
38 from IPython.core.profiledir import ProfileDir
39 from IPython.utils.localinterfaces import LOCALHOST
39 from IPython.utils.localinterfaces import localhost
40 from IPython.utils.path import filefind, get_ipython_dir
40 from IPython.utils.path import filefind, get_ipython_dir
41 from IPython.utils.py3compat import str_to_bytes, bytes_to_str
41 from IPython.utils.py3compat import str_to_bytes, bytes_to_str
42 from IPython.utils.traitlets import (
42 from IPython.utils.traitlets import (
43 Bool, Integer, Unicode, CaselessStrEnum,
43 Bool, Integer, Unicode, CaselessStrEnum,
44 )
44 )
45
45
46
46
47 #-----------------------------------------------------------------------------
47 #-----------------------------------------------------------------------------
48 # Working with Connection Files
48 # Working with Connection Files
49 #-----------------------------------------------------------------------------
49 #-----------------------------------------------------------------------------
50
50
51 def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
51 def write_connection_file(fname=None, shell_port=0, iopub_port=0, stdin_port=0, hb_port=0,
52 control_port=0, ip=LOCALHOST, key=b'', transport='tcp',
52 control_port=0, ip='', key=b'', transport='tcp',
53 signature_scheme='hmac-sha256',
53 signature_scheme='hmac-sha256',
54 ):
54 ):
55 """Generates a JSON config file, including the selection of random ports.
55 """Generates a JSON config file, including the selection of random ports.
56
56
57 Parameters
57 Parameters
58 ----------
58 ----------
59
59
60 fname : unicode
60 fname : unicode
61 The path to the file to write
61 The path to the file to write
62
62
63 shell_port : int, optional
63 shell_port : int, optional
64 The port to use for ROUTER (shell) channel.
64 The port to use for ROUTER (shell) channel.
65
65
66 iopub_port : int, optional
66 iopub_port : int, optional
67 The port to use for the SUB channel.
67 The port to use for the SUB channel.
68
68
69 stdin_port : int, optional
69 stdin_port : int, optional
70 The port to use for the ROUTER (raw input) channel.
70 The port to use for the ROUTER (raw input) channel.
71
71
72 control_port : int, optional
72 control_port : int, optional
73 The port to use for the ROUTER (control) channel.
73 The port to use for the ROUTER (control) channel.
74
74
75 hb_port : int, optional
75 hb_port : int, optional
76 The port to use for the heartbeat REP channel.
76 The port to use for the heartbeat REP channel.
77
77
78 ip : str, optional
78 ip : str, optional
79 The ip address the kernel will bind to.
79 The ip address the kernel will bind to.
80
80
81 key : str, optional
81 key : str, optional
82 The Session key used for message authentication.
82 The Session key used for message authentication.
83
83
84 signature_scheme : str, optional
84 signature_scheme : str, optional
85 The scheme used for message authentication.
85 The scheme used for message authentication.
86 This has the form 'digest-hash', where 'digest'
86 This has the form 'digest-hash', where 'digest'
87 is the scheme used for digests, and 'hash' is the name of the hash function
87 is the scheme used for digests, and 'hash' is the name of the hash function
88 used by the digest scheme.
88 used by the digest scheme.
89 Currently, 'hmac' is the only supported digest scheme,
89 Currently, 'hmac' is the only supported digest scheme,
90 and 'sha256' is the default hash function.
90 and 'sha256' is the default hash function.
91
91
92 """
92 """
93 if not ip:
94 ip = localhost()
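    # (Editor's note) ip defaults to '' rather than localhost() so that merely
    # importing this module never triggers the interface discovery in
    # IPython.utils.localinterfaces; the address is resolved lazily, here at
    # call time, which is the point of this changeset.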
    # default to temporary connection file
    if not fname:
        fname = tempfile.mktemp('.json')

    # Find open ports as necessary.

    ports = []
    ports_needed = int(shell_port <= 0) + \
                   int(iopub_port <= 0) + \
                   int(stdin_port <= 0) + \
                   int(control_port <= 0) + \
                   int(hb_port <= 0)
    if transport == 'tcp':
        for i in range(ports_needed):
            sock = socket.socket()
            sock.bind(('', 0))
            ports.append(sock)
        for i, sock in enumerate(ports):
            port = sock.getsockname()[1]
            sock.close()
            ports[i] = port
    else:
        N = 1
        for i in range(ports_needed):
            while os.path.exists("%s-%s" % (ip, str(N))):
                N += 1
            ports.append(N)
            N += 1
    if shell_port <= 0:
        shell_port = ports.pop(0)
    if iopub_port <= 0:
        iopub_port = ports.pop(0)
    if stdin_port <= 0:
        stdin_port = ports.pop(0)
    if control_port <= 0:
        control_port = ports.pop(0)
    if hb_port <= 0:
        hb_port = ports.pop(0)

    cfg = dict( shell_port=shell_port,
                iopub_port=iopub_port,
                stdin_port=stdin_port,
                control_port=control_port,
                hb_port=hb_port,
              )
    cfg['ip'] = ip
    cfg['key'] = bytes_to_str(key)
    cfg['transport'] = transport
    cfg['signature_scheme'] = signature_scheme

    with open(fname, 'w') as f:
        f.write(json.dumps(cfg, indent=2))

    return fname, cfg
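
# (Editor's illustration, not in the original source; the key value is made
# up) a minimal sketch of calling write_connection_file directly -- ports left
# at 0 are replaced with free ports chosen above, and the empty ip resolves to
# localhost():
#
#     fname, cfg = write_connection_file(key=b'a0b1c2d3')
#     print cfg['ip'], cfg['shell_port']    # e.g. 127.0.0.1 53412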


def get_connection_file(app=None):
    """Return the path to the connection file of an app

    Parameters
    ----------
    app : IPKernelApp instance [optional]
        If unspecified, the currently running app will be used
    """
    if app is None:
        from IPython.kernel.zmq.kernelapp import IPKernelApp
        if not IPKernelApp.initialized():
            raise RuntimeError("app not specified, and not in a running Kernel")

        app = IPKernelApp.instance()
    return filefind(app.connection_file, ['.', app.profile_dir.security_dir])


def find_connection_file(filename, profile=None):
    """find a connection file, and return its absolute path.

    The current working directory and the profile's security
    directory will be searched for the file if it is not given by
    absolute path.

    If profile is unspecified, then the current running application's
    profile will be used, or 'default', if not run from IPython.

    If the argument does not match an existing file, it will be interpreted as a
    fileglob, and the matching file in the profile's security dir with
    the latest access time will be used.

    Parameters
    ----------
    filename : str
        The connection file or fileglob to search for.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.

    Returns
    -------
    str : The absolute path of the connection file.
    """
    from IPython.core.application import BaseIPythonApplication as IPApp
    try:
        # quick check for absolute path, before going through logic
        return filefind(filename)
    except IOError:
        pass

    if profile is None:
        # profile unspecified, check if running from an IPython app
        if IPApp.initialized():
            app = IPApp.instance()
            profile_dir = app.profile_dir
        else:
            # not running in IPython, use default profile
            profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), 'default')
    else:
        # find profiledir by profile name:
        profile_dir = ProfileDir.find_profile_dir_by_name(get_ipython_dir(), profile)
    security_dir = profile_dir.security_dir

    try:
        # first, try explicit name
        return filefind(filename, ['.', security_dir])
    except IOError:
        pass

    # not found by full name

    if '*' in filename:
        # given as a glob already
        pat = filename
    else:
        # accept any substring match
        pat = '*%s*' % filename
    matches = glob.glob( os.path.join(security_dir, pat) )
    if not matches:
        raise IOError("Could not find %r in %r" % (filename, security_dir))
    elif len(matches) == 1:
        return matches[0]
    else:
        # get most recent match, by access time:
        return sorted(matches, key=lambda f: os.stat(f).st_atime)[-1]
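
# (Editor's illustration; the filenames are made up) both an exact name and a
# substring work:
#
#     find_connection_file('kernel-1234.json')  # explicit name, cwd or security dir
#     find_connection_file('1234')              # globbed as '*1234*'; newest match wins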


def get_connection_info(connection_file=None, unpack=False, profile=None):
    """Return the connection information for the current Kernel.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    unpack : bool [default: False]
        if True, return the unpacked dict, otherwise just the string contents
        of the file.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.


    Returns
    -------
    The connection dictionary of the current kernel, as string or dict,
    depending on `unpack`.
    """
    if connection_file is None:
        # get connection file from current kernel
        cf = get_connection_file()
    else:
        # connection file specified, allow shortnames:
        cf = find_connection_file(connection_file, profile=profile)

    with open(cf) as f:
        info = f.read()

    if unpack:
        info = json.loads(info)
        # ensure key is bytes:
        info['key'] = str_to_bytes(info.get('key', ''))
    return info
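
# (Editor's illustration) from inside a running kernel:
#
#     info = get_connection_info(unpack=True)
#     info['shell_port'], info['key']   # ports as ints, key coerced to bytes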


def connect_qtconsole(connection_file=None, argv=None, profile=None):
    """Connect a qtconsole to the current kernel.

    This is useful for connecting a second qtconsole to a kernel, or to a
    local notebook.

    Parameters
    ----------
    connection_file : str [optional]
        The connection file to be used. Can be given by absolute path, or
        IPython will search in the security directory of a given profile.

        If unspecified, the connection file for the currently running
        IPython Kernel will be used, which is only allowed from inside a kernel.
    argv : list [optional]
        Any extra args to be passed to the console.
    profile : str [optional]
        The name of the profile to use when searching for the connection file,
        if different from the current IPython session or 'default'.


    Returns
    -------
    subprocess.Popen instance running the qtconsole frontend
    """
    argv = [] if argv is None else argv

    if connection_file is None:
        # get connection file from current kernel
        cf = get_connection_file()
    else:
        cf = find_connection_file(connection_file, profile=profile)

    cmd = ';'.join([
        "from IPython.qt.console import qtconsoleapp",
        "qtconsoleapp.main()"
    ])

    return Popen([sys.executable, '-c', cmd, '--existing', cf] + argv,
                 stdout=PIPE, stderr=PIPE, close_fds=(sys.platform != 'win32'),
                 )
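
# (Editor's illustration) called from a running kernel, this opens a second
# frontend sharing that kernel:
#
#     p = connect_qtconsole()   # Popen handle
#     p.poll() is None          # True while the console is still running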


def tunnel_to_kernel(connection_info, sshserver, sshkey=None):
    """tunnel connections to a kernel via ssh

    This will open four SSH tunnels from localhost on this machine to the
    ports associated with the kernel. They can be either direct
    localhost-localhost tunnels, or if an intermediate server is necessary,
    the kernel must be listening on a public IP.

    Parameters
    ----------
    connection_info : dict or str (path)
        Either a connection dict, or the path to a JSON connection file
    sshserver : str
        The ssh server to use to tunnel to the kernel. Can be a full
        `user@server:port` string. ssh config aliases are respected.
    sshkey : str [optional]
        Path to file containing ssh key to use for authentication.
        Only necessary if your ssh config does not already associate
        a keyfile with the host.

    Returns
    -------

    (shell, iopub, stdin, hb) : ints
        The four ports on localhost that have been forwarded to the kernel.
    """
    if isinstance(connection_info, basestring):
        # it's a path, unpack it
        with open(connection_info) as f:
            connection_info = json.loads(f.read())

    cf = connection_info

    lports = tunnel.select_random_ports(4)
    rports = cf['shell_port'], cf['iopub_port'], cf['stdin_port'], cf['hb_port']

    remote_ip = cf['ip']

    if tunnel.try_passwordless_ssh(sshserver, sshkey):
        password = False
    else:
        password = getpass("SSH Password for %s: " % sshserver)

    for lp, rp in zip(lports, rports):
        tunnel.ssh_tunnel(lp, rp, sshserver, remote_ip, sshkey, password)

    return tuple(lports)
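
# (Editor's illustration; the filename and hostname are made up) tunneling to
# a kernel on a remote machine:
#
#     shell, iopub, stdin, hb = tunnel_to_kernel('kernel-1234.json',
#                                                'user@gateway.example.com')
#     # clients then connect to 127.0.0.1:<shell> etc.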


#-----------------------------------------------------------------------------
# Mixin for classes that work with connection files
#-----------------------------------------------------------------------------

channel_socket_types = {
    'hb' : zmq.REQ,
    'shell' : zmq.DEALER,
    'iopub' : zmq.SUB,
    'stdin' : zmq.DEALER,
    'control': zmq.DEALER,
}

port_names = [ "%s_port" % channel for channel in ('shell', 'stdin', 'iopub', 'hb', 'control')]
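
# (Editor's note) channel_socket_types above gives the client-side socket
# types; each pairs with its kernel-side counterpart: DEALER<->ROUTER for
# shell/stdin/control, SUB<->PUB for iopub, and REQ<->REP for the heartbeat.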

class ConnectionFileMixin(Configurable):
    """Mixin for configurable classes that work with connection files"""

    # The addresses for the communication channels
    connection_file = Unicode('')
    _connection_file_written = Bool(False)

    transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)

    ip = Unicode(config=True,
        help="""Set the kernel's IP address [default localhost].
        If the IP address is something other than localhost, then
        consoles on other machines will be able to connect
        to the Kernel, so be careful!"""
    )

    def _ip_default(self):
        if self.transport == 'ipc':
            if self.connection_file:
                return os.path.splitext(self.connection_file)[0] + '-ipc'
            else:
                return 'kernel-ipc'
        else:
            return localhost()

    def _ip_changed(self, name, old, new):
        if new == '*':
            self.ip = '0.0.0.0'

    # protected traits

    shell_port = Integer(0)
    iopub_port = Integer(0)
    stdin_port = Integer(0)
    control_port = Integer(0)
    hb_port = Integer(0)

    @property
    def ports(self):
        return [ getattr(self, name) for name in port_names ]

    #--------------------------------------------------------------------------
    # Connection and ipc file management
    #--------------------------------------------------------------------------

    def get_connection_info(self):
        """return the connection info as a dict"""
        return dict(
            transport=self.transport,
            ip=self.ip,
            shell_port=self.shell_port,
            iopub_port=self.iopub_port,
            stdin_port=self.stdin_port,
            hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
            key=self.session.key,
        )

    def cleanup_connection_file(self):
        """Cleanup connection file *if we wrote it*

        Will not raise if the connection file was already removed somehow.
        """
        if self._connection_file_written:
            # cleanup connection files on full shutdown of kernel we started
            self._connection_file_written = False
            try:
                os.remove(self.connection_file)
            except (IOError, OSError, AttributeError):
                pass

    def cleanup_ipc_files(self):
        """Cleanup ipc files if we wrote them."""
        if self.transport != 'ipc':
            return
        for port in self.ports:
            ipcfile = "%s-%i" % (self.ip, port)
            try:
                os.remove(ipcfile)
            except (IOError, OSError):
                pass

    def write_connection_file(self):
        """Write connection info to JSON dict in self.connection_file."""
        if self._connection_file_written:
            return

        self.connection_file, cfg = write_connection_file(self.connection_file,
            transport=self.transport, ip=self.ip, key=self.session.key,
            stdin_port=self.stdin_port, iopub_port=self.iopub_port,
            shell_port=self.shell_port, hb_port=self.hb_port,
            control_port=self.control_port,
            signature_scheme=self.session.signature_scheme,
        )
        # write_connection_file also sets default ports:
        for name in port_names:
            setattr(self, name, cfg[name])

        self._connection_file_written = True

    def load_connection_file(self):
        """Load connection info from JSON dict in self.connection_file."""
        with open(self.connection_file) as f:
            cfg = json.loads(f.read())

        self.transport = cfg.get('transport', 'tcp')
        self.ip = cfg['ip']
        for name in port_names:
            setattr(self, name, cfg[name])
        if 'key' in cfg:
            self.session.key = str_to_bytes(cfg['key'])
        if cfg.get('signature_scheme'):
            self.session.signature_scheme = cfg['signature_scheme']

    #--------------------------------------------------------------------------
    # Creating connected sockets
    #--------------------------------------------------------------------------

    def _make_url(self, channel):
        """Make a ZeroMQ URL for a given channel."""
        transport = self.transport
        ip = self.ip
        port = getattr(self, '%s_port' % channel)

        if transport == 'tcp':
            return "tcp://%s:%i" % (ip, port)
        else:
            return "%s://%s-%s" % (transport, ip, port)
514
516
515 def _create_connected_socket(self, channel, identity=None):
517 def _create_connected_socket(self, channel, identity=None):
516 """Create a zmq Socket and connect it to the kernel."""
518 """Create a zmq Socket and connect it to the kernel."""
517 url = self._make_url(channel)
519 url = self._make_url(channel)
518 socket_type = channel_socket_types[channel]
520 socket_type = channel_socket_types[channel]
519 self.log.info("Connecting to: %s" % url)
521 self.log.info("Connecting to: %s" % url)
520 sock = self.context.socket(socket_type)
522 sock = self.context.socket(socket_type)
521 if identity:
523 if identity:
522 sock.identity = identity
524 sock.identity = identity
523 sock.connect(url)
525 sock.connect(url)
524 return sock
526 return sock
525
527
526 def connect_iopub(self, identity=None):
528 def connect_iopub(self, identity=None):
527 """return zmq Socket connected to the IOPub channel"""
529 """return zmq Socket connected to the IOPub channel"""
528 sock = self._create_connected_socket('iopub', identity=identity)
530 sock = self._create_connected_socket('iopub', identity=identity)
529 sock.setsockopt(zmq.SUBSCRIBE, b'')
531 sock.setsockopt(zmq.SUBSCRIBE, b'')
530 return sock
532 return sock
531
533
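connect_iopub subscribes to everything (empty SUBSCRIBE prefix); a raw-pyzmq sketch of the same pattern, with a placeholder port:

    import zmq

    ctx = zmq.Context.instance()
    sub = ctx.socket(zmq.SUB)
    sub.setsockopt(zmq.SUBSCRIBE, b'')     # no topic filter: receive all messages
    sub.connect("tcp://127.0.0.1:53795")   # iopub_port from a connection file
    if sub.poll(1000):                     # wait up to 1s for traffic
        print(sub.recv_multipart())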
532 def connect_shell(self, identity=None):
534 def connect_shell(self, identity=None):
533 """return zmq Socket connected to the Shell channel"""
535 """return zmq Socket connected to the Shell channel"""
534 return self._create_connected_socket('shell', identity=identity)
536 return self._create_connected_socket('shell', identity=identity)
535
537
536 def connect_stdin(self, identity=None):
538 def connect_stdin(self, identity=None):
537 """return zmq Socket connected to the StdIn channel"""
539 """return zmq Socket connected to the StdIn channel"""
538 return self._create_connected_socket('stdin', identity=identity)
540 return self._create_connected_socket('stdin', identity=identity)
539
541
540 def connect_hb(self, identity=None):
542 def connect_hb(self, identity=None):
541 """return zmq Socket connected to the Heartbeat channel"""
543 """return zmq Socket connected to the Heartbeat channel"""
542 return self._create_connected_socket('hb', identity=identity)
544 return self._create_connected_socket('hb', identity=identity)
543
545
544 def connect_control(self, identity=None):
546 def connect_control(self, identity=None):
545 """return zmq Socket connected to the Heartbeat channel"""
547 """return zmq Socket connected to the Heartbeat channel"""
546 return self._create_connected_socket('control', identity=identity)
548 return self._create_connected_socket('control', identity=identity)
547
549
548
550
549 __all__ = [
551 __all__ = [
550 'write_connection_file',
552 'write_connection_file',
551 'get_connection_file',
553 'get_connection_file',
552 'find_connection_file',
554 'find_connection_file',
553 'get_connection_info',
555 'get_connection_info',
554 'connect_qtconsole',
556 'connect_qtconsole',
555 'tunnel_to_kernel',
557 'tunnel_to_kernel',
556 ]
558 ]
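Together these exports cover the common client workflow; a hedged sketch assuming the helpers' 1.x-era signatures (the lookup falls back to the newest kernel-*.json):

    from IPython.kernel import find_connection_file, get_connection_info

    # Resolve kernel-*.json from the current profile's security dir;
    # pass e.g. 'kernel-12345.json' or a pid substring to disambiguate.
    cf = find_connection_file()
    print(get_connection_info(cf))   # the connection dict, as JSON text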
@@ -1,379 +1,378 b''
1 """Base class to manage a running kernel
1 """Base class to manage a running kernel"""
2 """
3
2
4 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
5 # Copyright (C) 2013 The IPython Development Team
4 # Copyright (C) 2013 The IPython Development Team
6 #
5 #
7 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
7 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
10
9
11 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
12 # Imports
11 # Imports
13 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
14
13
15 from __future__ import absolute_import
14 from __future__ import absolute_import
16
15
17 # Standard library imports
16 # Standard library imports
18 import signal
17 import signal
19 import sys
18 import sys
20 import time
19 import time
21
20
22 import zmq
21 import zmq
23
22
24 # Local imports
23 # Local imports
25 from IPython.config.configurable import LoggingConfigurable
24 from IPython.config.configurable import LoggingConfigurable
26 from IPython.utils.importstring import import_item
25 from IPython.utils.importstring import import_item
27 from IPython.utils.localinterfaces import LOCAL_IPS
26 from IPython.utils.localinterfaces import is_local_ip, local_ips
28 from IPython.utils.traitlets import (
27 from IPython.utils.traitlets import (
29 Any, Instance, Unicode, List, Bool, Type, DottedObjectName
28 Any, Instance, Unicode, List, Bool, Type, DottedObjectName
30 )
29 )
31 from IPython.kernel import (
30 from IPython.kernel import (
32 make_ipkernel_cmd,
31 make_ipkernel_cmd,
33 launch_kernel,
32 launch_kernel,
34 )
33 )
35 from .connect import ConnectionFileMixin
34 from .connect import ConnectionFileMixin
36 from .zmq.session import Session
35 from .zmq.session import Session
37 from .managerabc import (
36 from .managerabc import (
38 KernelManagerABC
37 KernelManagerABC
39 )
38 )
40
39
41 #-----------------------------------------------------------------------------
40 #-----------------------------------------------------------------------------
42 # Main kernel manager class
41 # Main kernel manager class
43 #-----------------------------------------------------------------------------
42 #-----------------------------------------------------------------------------
44
43
45 class KernelManager(LoggingConfigurable, ConnectionFileMixin):
44 class KernelManager(LoggingConfigurable, ConnectionFileMixin):
46 """Manages a single kernel in a subprocess on this host.
45 """Manages a single kernel in a subprocess on this host.
47
46
48 This version starts kernels with Popen.
47 This version starts kernels with Popen.
49 """
48 """
50
49
51 # The PyZMQ Context to use for communication with the kernel.
50 # The PyZMQ Context to use for communication with the kernel.
52 context = Instance(zmq.Context)
51 context = Instance(zmq.Context)
53 def _context_default(self):
52 def _context_default(self):
54 return zmq.Context.instance()
53 return zmq.Context.instance()
55
54
56 # The Session to use for communication with the kernel.
55 # The Session to use for communication with the kernel.
57 session = Instance(Session)
56 session = Instance(Session)
58 def _session_default(self):
57 def _session_default(self):
59 return Session(parent=self)
58 return Session(parent=self)
60
59
61 # the class to create with our `client` method
60 # the class to create with our `client` method
62 client_class = DottedObjectName('IPython.kernel.blocking.BlockingKernelClient')
61 client_class = DottedObjectName('IPython.kernel.blocking.BlockingKernelClient')
63 client_factory = Type()
62 client_factory = Type()
64 def _client_class_changed(self, name, old, new):
63 def _client_class_changed(self, name, old, new):
65 self.client_factory = import_item(str(new))
64 self.client_factory = import_item(str(new))
66
65
67 # The kernel process with which the KernelManager is communicating.
66 # The kernel process with which the KernelManager is communicating.
68 # generally a Popen instance
67 # generally a Popen instance
69 kernel = Any()
68 kernel = Any()
70
69
71 kernel_cmd = List(Unicode, config=True,
70 kernel_cmd = List(Unicode, config=True,
72 help="""The Popen Command to launch the kernel.
71 help="""The Popen Command to launch the kernel.
73 Override this if you have a custom kernel command.
72 Override this if you have a custom kernel command.
74 """
73 """
75 )
74 )
76
75
77 def _kernel_cmd_changed(self, name, old, new):
76 def _kernel_cmd_changed(self, name, old, new):
78 self.ipython_kernel = False
77 self.ipython_kernel = False
79
78
80 ipython_kernel = Bool(True)
79 ipython_kernel = Bool(True)
81
80
82 # Protected traits
81 # Protected traits
83 _launch_args = Any()
82 _launch_args = Any()
84 _control_socket = Any()
83 _control_socket = Any()
85
84
86 _restarter = Any()
85 _restarter = Any()
87
86
88 autorestart = Bool(False, config=True,
87 autorestart = Bool(False, config=True,
89 help="""Should we autorestart the kernel if it dies."""
88 help="""Should we autorestart the kernel if it dies."""
90 )
89 )
91
90
92 def __del__(self):
91 def __del__(self):
93 self._close_control_socket()
92 self._close_control_socket()
94 self.cleanup_connection_file()
93 self.cleanup_connection_file()
95
94
96 #--------------------------------------------------------------------------
95 #--------------------------------------------------------------------------
97 # Kernel restarter
96 # Kernel restarter
98 #--------------------------------------------------------------------------
97 #--------------------------------------------------------------------------
99
98
100 def start_restarter(self):
99 def start_restarter(self):
101 pass
100 pass
102
101
103 def stop_restarter(self):
102 def stop_restarter(self):
104 pass
103 pass
105
104
106 def add_restart_callback(self, callback, event='restart'):
105 def add_restart_callback(self, callback, event='restart'):
107 """register a callback to be called when a kernel is restarted"""
106 """register a callback to be called when a kernel is restarted"""
108 if self._restarter is None:
107 if self._restarter is None:
109 return
108 return
110 self._restarter.add_callback(callback, event)
109 self._restarter.add_callback(callback, event)
111
110
112 def remove_restart_callback(self, callback, event='restart'):
111 def remove_restart_callback(self, callback, event='restart'):
113 """unregister a callback to be called when a kernel is restarted"""
112 """unregister a callback to be called when a kernel is restarted"""
114 if self._restarter is None:
113 if self._restarter is None:
115 return
114 return
116 self._restarter.remove_callback(callback, event)
115 self._restarter.remove_callback(callback, event)
117
116
118 #--------------------------------------------------------------------------
117 #--------------------------------------------------------------------------
119 # create a Client connected to our Kernel
118 # create a Client connected to our Kernel
120 #--------------------------------------------------------------------------
119 #--------------------------------------------------------------------------
121
120
122 def client(self, **kwargs):
121 def client(self, **kwargs):
123 """Create a client configured to connect to our kernel"""
122 """Create a client configured to connect to our kernel"""
124 if self.client_factory is None:
123 if self.client_factory is None:
125 self.client_factory = import_item(self.client_class)
124 self.client_factory = import_item(self.client_class)
126
125
127 kw = {}
126 kw = {}
128 kw.update(self.get_connection_info())
127 kw.update(self.get_connection_info())
129 kw.update(dict(
128 kw.update(dict(
130 connection_file=self.connection_file,
129 connection_file=self.connection_file,
131 session=self.session,
130 session=self.session,
132 parent=self,
131 parent=self,
133 ))
132 ))
134
133
135 # add kwargs last, for manual overrides
134 # add kwargs last, for manual overrides
136 kw.update(kwargs)
135 kw.update(kwargs)
137 return self.client_factory(**kw)
136 return self.client_factory(**kw)
138
137
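A typical lifecycle built on client() and the management methods below; a sketch assuming the blocking client API of this series:

    from IPython.kernel import KernelManager

    km = KernelManager()
    km.start_kernel()            # writes the connection file, spawns the subprocess
    kc = km.client()             # a BlockingKernelClient by default (client_class)
    kc.start_channels()
    kc.execute("a = 1 + 1")      # execute_request on the shell channel
    kc.stop_channels()
    km.shutdown_kernel(now=True)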
139 #--------------------------------------------------------------------------
138 #--------------------------------------------------------------------------
140 # Kernel management
139 # Kernel management
141 #--------------------------------------------------------------------------
140 #--------------------------------------------------------------------------
142
141
143 def format_kernel_cmd(self, **kw):
142 def format_kernel_cmd(self, **kw):
144 """format templated args (e.g. {connection_file})"""
143 """format templated args (e.g. {connection_file})"""
145 if self.kernel_cmd:
144 if self.kernel_cmd:
146 cmd = self.kernel_cmd
145 cmd = self.kernel_cmd
147 else:
146 else:
148 cmd = make_ipkernel_cmd(
147 cmd = make_ipkernel_cmd(
149 'from IPython.kernel.zmq.kernelapp import main; main()',
148 'from IPython.kernel.zmq.kernelapp import main; main()',
150 **kw
149 **kw
151 )
150 )
152 ns = dict(connection_file=self.connection_file)
151 ns = dict(connection_file=self.connection_file)
153 ns.update(self._launch_args)
152 ns.update(self._launch_args)
154 return [ c.format(**ns) for c in cmd ]
153 return [ c.format(**ns) for c in cmd ]
155
154
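Only names present in the namespace are substituted, chiefly {connection_file}; a sketch with a hypothetical custom command:

    # 'mykernel' is a made-up module name, used only for illustration.
    cmd = ['python', '-m', 'mykernel', '-f', '{connection_file}']
    ns = {'connection_file': '/tmp/kernel-12345.json'}
    print([c.format(**ns) for c in cmd])
    # -> ['python', '-m', 'mykernel', '-f', '/tmp/kernel-12345.json']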
156 def _launch_kernel(self, kernel_cmd, **kw):
155 def _launch_kernel(self, kernel_cmd, **kw):
157 """actually launch the kernel
156 """actually launch the kernel
158
157
159 override in a subclass to launch kernel subprocesses differently
158 override in a subclass to launch kernel subprocesses differently
160 """
159 """
161 return launch_kernel(kernel_cmd, **kw)
160 return launch_kernel(kernel_cmd, **kw)
162
161
163 # Control socket used for polite kernel shutdown
162 # Control socket used for polite kernel shutdown
164
163
165 def _connect_control_socket(self):
164 def _connect_control_socket(self):
166 if self._control_socket is None:
165 if self._control_socket is None:
167 self._control_socket = self.connect_control()
166 self._control_socket = self.connect_control()
168 self._control_socket.linger = 100
167 self._control_socket.linger = 100
169
168
170 def _close_control_socket(self):
169 def _close_control_socket(self):
171 if self._control_socket is None:
170 if self._control_socket is None:
172 return
171 return
173 self._control_socket.close()
172 self._control_socket.close()
174 self._control_socket = None
173 self._control_socket = None
175
174
176 def start_kernel(self, **kw):
175 def start_kernel(self, **kw):
177 """Starts a kernel on this host in a separate process.
176 """Starts a kernel on this host in a separate process.
178
177
179 If random ports (port=0) are being used, this method must be called
178 If random ports (port=0) are being used, this method must be called
180 before the channels are created.
179 before the channels are created.
181
180
182 Parameters
181 Parameters
183 ----------
182 ----------
184 **kw : optional
183 **kw : optional
185 keyword arguments that are passed down to build the kernel_cmd
184 keyword arguments that are passed down to build the kernel_cmd
186 and to launch the kernel (e.g. Popen kwargs).
185 and to launch the kernel (e.g. Popen kwargs).
187 """
186 """
188 if self.transport == 'tcp' and self.ip not in LOCAL_IPS:
187 if self.transport == 'tcp' and not is_local_ip(self.ip):
189 raise RuntimeError("Can only launch a kernel on a local interface. "
188 raise RuntimeError("Can only launch a kernel on a local interface. "
190 "Make sure that the '*_address' attributes are "
189 "Make sure that the '*_address' attributes are "
191 "configured properly. "
190 "configured properly. "
192 "Currently valid addresses are: %s"%LOCAL_IPS
191 "Currently valid addresses are: %s" % local_ips()
193 )
192 )
194
193
195 # write connection file / get default ports
194 # write connection file / get default ports
196 self.write_connection_file()
195 self.write_connection_file()
197
196
198 # save kwargs for use in restart
197 # save kwargs for use in restart
199 self._launch_args = kw.copy()
198 self._launch_args = kw.copy()
200 # build the Popen cmd
199 # build the Popen cmd
201 kernel_cmd = self.format_kernel_cmd(**kw)
200 kernel_cmd = self.format_kernel_cmd(**kw)
202 # launch the kernel subprocess
201 # launch the kernel subprocess
203 self.kernel = self._launch_kernel(kernel_cmd,
202 self.kernel = self._launch_kernel(kernel_cmd,
204 ipython_kernel=self.ipython_kernel,
203 ipython_kernel=self.ipython_kernel,
205 **kw)
204 **kw)
206 self.start_restarter()
205 self.start_restarter()
207 self._connect_control_socket()
206 self._connect_control_socket()
208
207
209 def _send_shutdown_request(self, restart=False):
208 def _send_shutdown_request(self, restart=False):
210 """TODO: send a shutdown request via control channel"""
209 """TODO: send a shutdown request via control channel"""
211 content = dict(restart=restart)
210 content = dict(restart=restart)
212 msg = self.session.msg("shutdown_request", content=content)
211 msg = self.session.msg("shutdown_request", content=content)
213 self.session.send(self._control_socket, msg)
212 self.session.send(self._control_socket, msg)
214
213
215 def shutdown_kernel(self, now=False, restart=False):
214 def shutdown_kernel(self, now=False, restart=False):
216 """Attempts to the stop the kernel process cleanly.
215 """Attempts to the stop the kernel process cleanly.
217
216
218 This attempts to shut down the kernel cleanly by:
217 This attempts to shut down the kernel cleanly by:
219
218
220 1. Sending it a shutdown message over the control channel.
219 1. Sending it a shutdown message over the control channel.
221 2. If that fails, the kernel is shutdown forcibly by sending it
220 2. If that fails, the kernel is shutdown forcibly by sending it
222 a signal.
221 a signal.
223
222
224 Parameters
223 Parameters
225 ----------
224 ----------
226 now : bool
225 now : bool
227 Whether the kernel should be forcibly killed *now*. This skips the
226 Whether the kernel should be forcibly killed *now*. This skips the
228 first, nice shutdown attempt.
227 first, nice shutdown attempt.
229 restart : bool
228 restart : bool
230 Whether this kernel will be restarted after it is shut down. When this
229 Whether this kernel will be restarted after it is shut down. When this
231 is True, connection files will not be cleaned up.
230 is True, connection files will not be cleaned up.
232 """
231 """
233 # Stop monitoring for restarting while we shutdown.
232 # Stop monitoring for restarting while we shutdown.
234 self.stop_restarter()
233 self.stop_restarter()
235
234
236 # FIXME: Shutdown does not work on Windows due to ZMQ errors!
235 # FIXME: Shutdown does not work on Windows due to ZMQ errors!
237 if sys.platform == 'win32':
236 if sys.platform == 'win32':
238 self._kill_kernel()
237 self._kill_kernel()
239 return
238 return
240
239
241 if now:
240 if now:
242 if self.has_kernel:
241 if self.has_kernel:
243 self._kill_kernel()
242 self._kill_kernel()
244 else:
243 else:
245 # Don't send any additional kernel kill messages immediately, to give
244 # Don't send any additional kernel kill messages immediately, to give
246 # the kernel a chance to properly execute shutdown actions. Wait for at
245 # the kernel a chance to properly execute shutdown actions. Wait for at
247 # most 1s, checking every 0.1s.
246 # most 1s, checking every 0.1s.
248 self._send_shutdown_request(restart=restart)
247 self._send_shutdown_request(restart=restart)
249 for i in range(10):
248 for i in range(10):
250 if self.is_alive():
249 if self.is_alive():
251 time.sleep(0.1)
250 time.sleep(0.1)
252 else:
251 else:
253 break
252 break
254 else:
253 else:
255 # OK, we've waited long enough.
254 # OK, we've waited long enough.
256 if self.has_kernel:
255 if self.has_kernel:
257 self._kill_kernel()
256 self._kill_kernel()
258
257
259 if not restart:
258 if not restart:
260 self.cleanup_connection_file()
259 self.cleanup_connection_file()
261 self.cleanup_ipc_files()
260 self.cleanup_ipc_files()
262 else:
261 else:
263 self.cleanup_ipc_files()
262 self.cleanup_ipc_files()
264
263
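In practice the restart flag decides whether the connection file survives; a short sketch of the three shutdown modes:

    from IPython.kernel import KernelManager

    km = KernelManager()
    km.start_kernel()
    # Polite: shutdown_request first, a forceful kill only if still alive after ~1s.
    km.shutdown_kernel()
    # km.shutdown_kernel(now=True)      # skip the polite attempt entirely
    # km.shutdown_kernel(restart=True)  # keep the connection file for port reuse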
265 def restart_kernel(self, now=False, **kw):
264 def restart_kernel(self, now=False, **kw):
266 """Restarts a kernel with the arguments that were used to launch it.
265 """Restarts a kernel with the arguments that were used to launch it.
267
266
268 If the old kernel was launched with random ports, the same ports will be
267 If the old kernel was launched with random ports, the same ports will be
269 used for the new kernel. The same connection file is used again.
268 used for the new kernel. The same connection file is used again.
270
269
271 Parameters
270 Parameters
272 ----------
271 ----------
273 now : bool, optional
272 now : bool, optional
274 If True, the kernel is forcefully restarted *immediately*, without
273 If True, the kernel is forcefully restarted *immediately*, without
275 having a chance to do any cleanup action. Otherwise the kernel is
274 having a chance to do any cleanup action. Otherwise the kernel is
276 given 1s to clean up before a forceful restart is issued.
275 given 1s to clean up before a forceful restart is issued.
277
276
278 In all cases the kernel is restarted, the only difference is whether
277 In all cases the kernel is restarted, the only difference is whether
279 it is given a chance to perform a clean shutdown or not.
278 it is given a chance to perform a clean shutdown or not.
280
279
281 **kw : optional
280 **kw : optional
282 Any options specified here will overwrite those used to launch the
281 Any options specified here will overwrite those used to launch the
283 kernel.
282 kernel.
284 """
283 """
285 if self._launch_args is None:
284 if self._launch_args is None:
286 raise RuntimeError("Cannot restart the kernel. "
285 raise RuntimeError("Cannot restart the kernel. "
287 "No previous call to 'start_kernel'.")
286 "No previous call to 'start_kernel'.")
288 else:
287 else:
289 # Stop currently running kernel.
288 # Stop currently running kernel.
290 self.shutdown_kernel(now=now, restart=True)
289 self.shutdown_kernel(now=now, restart=True)
291
290
292 # Start new kernel.
291 # Start new kernel.
293 self._launch_args.update(kw)
292 self._launch_args.update(kw)
294 self.start_kernel(**self._launch_args)
293 self.start_kernel(**self._launch_args)
295
294
296 # FIXME: Messages get dropped in Windows due to probable ZMQ bug
295 # FIXME: Messages get dropped in Windows due to probable ZMQ bug
297 # unless there is some delay here.
296 # unless there is some delay here.
298 if sys.platform == 'win32':
297 if sys.platform == 'win32':
299 time.sleep(0.2)
298 time.sleep(0.2)
300
299
301 @property
300 @property
302 def has_kernel(self):
301 def has_kernel(self):
303 """Has a kernel been started that we are managing."""
302 """Has a kernel been started that we are managing."""
304 return self.kernel is not None
303 return self.kernel is not None
305
304
306 def _kill_kernel(self):
305 def _kill_kernel(self):
307 """Kill the running kernel.
306 """Kill the running kernel.
308
307
309 This is a private method, callers should use shutdown_kernel(now=True).
308 This is a private method, callers should use shutdown_kernel(now=True).
310 """
309 """
311 if self.has_kernel:
310 if self.has_kernel:
312
311
313 # Signal the kernel to terminate (sends SIGKILL on Unix and calls
312 # Signal the kernel to terminate (sends SIGKILL on Unix and calls
314 # TerminateProcess() on Win32).
313 # TerminateProcess() on Win32).
315 try:
314 try:
316 self.kernel.kill()
315 self.kernel.kill()
317 except OSError as e:
316 except OSError as e:
318 # In Windows, we will get an Access Denied error if the process
317 # In Windows, we will get an Access Denied error if the process
319 # has already terminated. Ignore it.
318 # has already terminated. Ignore it.
320 if sys.platform == 'win32':
319 if sys.platform == 'win32':
321 if e.winerror != 5:
320 if e.winerror != 5:
322 raise
321 raise
323 # On Unix, we may get an ESRCH error if the process has already
322 # On Unix, we may get an ESRCH error if the process has already
324 # terminated. Ignore it.
323 # terminated. Ignore it.
325 else:
324 else:
326 from errno import ESRCH
325 from errno import ESRCH
327 if e.errno != ESRCH:
326 if e.errno != ESRCH:
328 raise
327 raise
329
328
330 # Block until the kernel terminates.
329 # Block until the kernel terminates.
331 self.kernel.wait()
330 self.kernel.wait()
332 self.kernel = None
331 self.kernel = None
333 else:
332 else:
334 raise RuntimeError("Cannot kill kernel. No kernel is running!")
333 raise RuntimeError("Cannot kill kernel. No kernel is running!")
335
334
336 def interrupt_kernel(self):
335 def interrupt_kernel(self):
337 """Interrupts the kernel by sending it a signal.
336 """Interrupts the kernel by sending it a signal.
338
337
339 Unlike ``signal_kernel``, this operation is well supported on all
338 Unlike ``signal_kernel``, this operation is well supported on all
340 platforms.
339 platforms.
341 """
340 """
342 if self.has_kernel:
341 if self.has_kernel:
343 if sys.platform == 'win32':
342 if sys.platform == 'win32':
344 from .zmq.parentpoller import ParentPollerWindows as Poller
343 from .zmq.parentpoller import ParentPollerWindows as Poller
345 Poller.send_interrupt(self.kernel.win32_interrupt_event)
344 Poller.send_interrupt(self.kernel.win32_interrupt_event)
346 else:
345 else:
347 self.kernel.send_signal(signal.SIGINT)
346 self.kernel.send_signal(signal.SIGINT)
348 else:
347 else:
349 raise RuntimeError("Cannot interrupt kernel. No kernel is running!")
348 raise RuntimeError("Cannot interrupt kernel. No kernel is running!")
350
349
351 def signal_kernel(self, signum):
350 def signal_kernel(self, signum):
352 """Sends a signal to the kernel.
351 """Sends a signal to the kernel.
353
352
354 Note that since only SIGTERM is supported on Windows, this function is
353 Note that since only SIGTERM is supported on Windows, this function is
355 only useful on Unix systems.
354 only useful on Unix systems.
356 """
355 """
357 if self.has_kernel:
356 if self.has_kernel:
358 self.kernel.send_signal(signum)
357 self.kernel.send_signal(signum)
359 else:
358 else:
360 raise RuntimeError("Cannot signal kernel. No kernel is running!")
359 raise RuntimeError("Cannot signal kernel. No kernel is running!")
361
360
362 def is_alive(self):
361 def is_alive(self):
363 """Is the kernel process still running?"""
362 """Is the kernel process still running?"""
364 if self.has_kernel:
363 if self.has_kernel:
365 if self.kernel.poll() is None:
364 if self.kernel.poll() is None:
366 return True
365 return True
367 else:
366 else:
368 return False
367 return False
369 else:
368 else:
370 # we don't have a kernel
369 # we don't have a kernel
371 return False
370 return False
372
371
373
372
374 #-----------------------------------------------------------------------------
373 #-----------------------------------------------------------------------------
375 # ABC Registration
374 # ABC Registration
376 #-----------------------------------------------------------------------------
375 #-----------------------------------------------------------------------------
377
376
378 KernelManagerABC.register(KernelManager)
377 KernelManagerABC.register(KernelManager)
379
378
@@ -1,78 +1,78 b''
1 """Tests for the notebook kernel and session manager."""
1 """Tests for the notebook kernel and session manager."""
2
2
3 from subprocess import PIPE
3 from subprocess import PIPE
4 import time
4 import time
5 from unittest import TestCase
5 from unittest import TestCase
6
6
7 from IPython.testing import decorators as dec
7 from IPython.testing import decorators as dec
8
8
9 from IPython.config.loader import Config
9 from IPython.config.loader import Config
10 from IPython.utils.localinterfaces import LOCALHOST
10 from IPython.utils.localinterfaces import localhost
11 from IPython.kernel import KernelManager
11 from IPython.kernel import KernelManager
12 from IPython.kernel.multikernelmanager import MultiKernelManager
12 from IPython.kernel.multikernelmanager import MultiKernelManager
13
13
14 class TestKernelManager(TestCase):
14 class TestKernelManager(TestCase):
15
15
16 def _get_tcp_km(self):
16 def _get_tcp_km(self):
17 c = Config()
17 c = Config()
18 km = MultiKernelManager(config=c)
18 km = MultiKernelManager(config=c)
19 return km
19 return km
20
20
21 def _get_ipc_km(self):
21 def _get_ipc_km(self):
22 c = Config()
22 c = Config()
23 c.KernelManager.transport = 'ipc'
23 c.KernelManager.transport = 'ipc'
24 c.KernelManager.ip = 'test'
24 c.KernelManager.ip = 'test'
25 km = MultiKernelManager(config=c)
25 km = MultiKernelManager(config=c)
26 return km
26 return km
27
27
28 def _run_lifecycle(self, km):
28 def _run_lifecycle(self, km):
29 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
29 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
30 self.assertTrue(km.is_alive(kid))
30 self.assertTrue(km.is_alive(kid))
31 self.assertTrue(kid in km)
31 self.assertTrue(kid in km)
32 self.assertTrue(kid in km.list_kernel_ids())
32 self.assertTrue(kid in km.list_kernel_ids())
33 self.assertEqual(len(km),1)
33 self.assertEqual(len(km),1)
34 km.restart_kernel(kid, now=True)
34 km.restart_kernel(kid, now=True)
35 self.assertTrue(km.is_alive(kid))
35 self.assertTrue(km.is_alive(kid))
36 self.assertTrue(kid in km.list_kernel_ids())
36 self.assertTrue(kid in km.list_kernel_ids())
37 km.interrupt_kernel(kid)
37 km.interrupt_kernel(kid)
38 k = km.get_kernel(kid)
38 k = km.get_kernel(kid)
39 self.assertTrue(isinstance(k, KernelManager))
39 self.assertTrue(isinstance(k, KernelManager))
40 km.shutdown_kernel(kid, now=True)
40 km.shutdown_kernel(kid, now=True)
41 self.assertTrue(not kid in km)
41 self.assertTrue(not kid in km)
42
42
43 def _run_cinfo(self, km, transport, ip):
43 def _run_cinfo(self, km, transport, ip):
44 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
44 kid = km.start_kernel(stdout=PIPE, stderr=PIPE)
45 k = km.get_kernel(kid)
45 k = km.get_kernel(kid)
46 cinfo = km.get_connection_info(kid)
46 cinfo = km.get_connection_info(kid)
47 self.assertEqual(transport, cinfo['transport'])
47 self.assertEqual(transport, cinfo['transport'])
48 self.assertEqual(ip, cinfo['ip'])
48 self.assertEqual(ip, cinfo['ip'])
49 self.assertTrue('stdin_port' in cinfo)
49 self.assertTrue('stdin_port' in cinfo)
50 self.assertTrue('iopub_port' in cinfo)
50 self.assertTrue('iopub_port' in cinfo)
51 stream = km.connect_iopub(kid)
51 stream = km.connect_iopub(kid)
52 stream.close()
52 stream.close()
53 self.assertTrue('shell_port' in cinfo)
53 self.assertTrue('shell_port' in cinfo)
54 stream = km.connect_shell(kid)
54 stream = km.connect_shell(kid)
55 stream.close()
55 stream.close()
56 self.assertTrue('hb_port' in cinfo)
56 self.assertTrue('hb_port' in cinfo)
57 stream = km.connect_hb(kid)
57 stream = km.connect_hb(kid)
58 stream.close()
58 stream.close()
59 km.shutdown_kernel(kid, now=True)
59 km.shutdown_kernel(kid, now=True)
60
60
61 def test_tcp_lifecycle(self):
61 def test_tcp_lifecycle(self):
62 km = self._get_tcp_km()
62 km = self._get_tcp_km()
63 self._run_lifecycle(km)
63 self._run_lifecycle(km)
64
64
65 def test_tcp_cinfo(self):
65 def test_tcp_cinfo(self):
66 km = self._get_tcp_km()
66 km = self._get_tcp_km()
67 self._run_cinfo(km, 'tcp', LOCALHOST)
67 self._run_cinfo(km, 'tcp', localhost())
68
68
69 @dec.skip_win32
69 @dec.skip_win32
70 def test_ipc_lifecycle(self):
70 def test_ipc_lifecycle(self):
71 km = self._get_ipc_km()
71 km = self._get_ipc_km()
72 self._run_lifecycle(km)
72 self._run_lifecycle(km)
73
73
74 @dec.skip_win32
74 @dec.skip_win32
75 def test_ipc_cinfo(self):
75 def test_ipc_cinfo(self):
76 km = self._get_ipc_km()
76 km = self._get_ipc_km()
77 self._run_cinfo(km, 'ipc', 'test')
77 self._run_cinfo(km, 'ipc', 'test')
78
78
@@ -1,65 +1,67 b''
1 """The client and server for a basic ping-pong style heartbeat.
1 """The client and server for a basic ping-pong style heartbeat.
2 """
2 """
3
3
4 #-----------------------------------------------------------------------------
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2008-2011 The IPython Development Team
5 # Copyright (C) 2008-2011 The IPython Development Team
6 #
6 #
7 # Distributed under the terms of the BSD License. The full license is in
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10
10
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 # Imports
12 # Imports
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 import errno
15 import errno
16 import os
16 import os
17 import socket
17 import socket
18 from threading import Thread
18 from threading import Thread
19
19
20 import zmq
20 import zmq
21
21
22 from IPython.utils.localinterfaces import LOCALHOST
22 from IPython.utils.localinterfaces import localhost
23
23
24 #-----------------------------------------------------------------------------
24 #-----------------------------------------------------------------------------
25 # Code
25 # Code
26 #-----------------------------------------------------------------------------
26 #-----------------------------------------------------------------------------
27
27
28
28
29 class Heartbeat(Thread):
29 class Heartbeat(Thread):
30 "A simple ping-pong style heartbeat that runs in a thread."
30 "A simple ping-pong style heartbeat that runs in a thread."
31
31
32 def __init__(self, context, addr=('tcp', LOCALHOST, 0)):
32 def __init__(self, context, addr=None):
33 if addr is None:
34 addr = ('tcp', localhost(), 0)
33 Thread.__init__(self)
35 Thread.__init__(self)
34 self.context = context
36 self.context = context
35 self.transport, self.ip, self.port = addr
37 self.transport, self.ip, self.port = addr
36 if self.port == 0:
38 if self.port == 0:
37 if addr[0] == 'tcp':
39 if addr[0] == 'tcp':
38 s = socket.socket()
40 s = socket.socket()
39 # '*' means all interfaces to 0MQ, which is '' to socket.socket
41 # '*' means all interfaces to 0MQ, which is '' to socket.socket
40 s.bind(('' if self.ip == '*' else self.ip, 0))
42 s.bind(('' if self.ip == '*' else self.ip, 0))
41 self.port = s.getsockname()[1]
43 self.port = s.getsockname()[1]
42 s.close()
44 s.close()
43 elif addr[0] == 'ipc':
45 elif addr[0] == 'ipc':
44 self.port = 1
46 self.port = 1
45 while os.path.exists("%s-%s" % (self.ip, self.port)):
47 while os.path.exists("%s-%s" % (self.ip, self.port)):
46 self.port = self.port + 1
48 self.port = self.port + 1
47 else:
49 else:
48 raise ValueError("Unrecognized zmq transport: %s" % addr[0])
50 raise ValueError("Unrecognized zmq transport: %s" % addr[0])
49 self.addr = (self.ip, self.port)
51 self.addr = (self.ip, self.port)
50 self.daemon = True
52 self.daemon = True
51
53
52 def run(self):
54 def run(self):
53 self.socket = self.context.socket(zmq.REP)
55 self.socket = self.context.socket(zmq.REP)
54 c = ':' if self.transport == 'tcp' else '-'
56 c = ':' if self.transport == 'tcp' else '-'
55 self.socket.bind('%s://%s' % (self.transport, self.ip) + c + str(self.port))
57 self.socket.bind('%s://%s' % (self.transport, self.ip) + c + str(self.port))
56 while True:
58 while True:
57 try:
59 try:
58 zmq.device(zmq.FORWARDER, self.socket, self.socket)
60 zmq.device(zmq.FORWARDER, self.socket, self.socket)
59 except zmq.ZMQError as e:
61 except zmq.ZMQError as e:
60 if e.errno == errno.EINTR:
62 if e.errno == errno.EINTR:
61 continue
63 continue
62 else:
64 else:
63 raise
65 raise
64 else:
66 else:
65 break
67 break
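Since the heartbeat is a plain echo device, a client can probe liveness with a REQ socket; a minimal sketch (the port is a placeholder for hb_port):

    import zmq

    ctx = zmq.Context.instance()
    req = ctx.socket(zmq.REQ)
    req.connect("tcp://127.0.0.1:53797")   # hb_port from the connection file
    req.send(b'ping')
    if req.poll(1000):                     # the FORWARDER echoes the bytes back
        assert req.recv() == b'ping'
    else:
        print("no heartbeat within 1s")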
@@ -1,472 +1,473 b''
1 """An Application for launching a kernel
1 """An Application for launching a kernel
2
2
3 Authors
3 Authors
4 -------
4 -------
5 * MinRK
5 * MinRK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2011 The IPython Development Team
8 # Copyright (C) 2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING.txt, distributed as part of this software.
11 # the file COPYING.txt, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 from __future__ import print_function
18 from __future__ import print_function
19
19
20 # Standard library imports
20 # Standard library imports
21 import atexit
21 import atexit
22 import json
22 import json
23 import os
23 import os
24 import sys
24 import sys
25 import signal
25 import signal
26
26
27 # System library imports
27 # System library imports
28 import zmq
28 import zmq
29 from zmq.eventloop import ioloop
29 from zmq.eventloop import ioloop
30 from zmq.eventloop.zmqstream import ZMQStream
30 from zmq.eventloop.zmqstream import ZMQStream
31
31
32 # IPython imports
32 # IPython imports
33 from IPython.core.ultratb import FormattedTB
33 from IPython.core.ultratb import FormattedTB
34 from IPython.core.application import (
34 from IPython.core.application import (
35 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
35 BaseIPythonApplication, base_flags, base_aliases, catch_config_error
36 )
36 )
37 from IPython.core.profiledir import ProfileDir
37 from IPython.core.profiledir import ProfileDir
38 from IPython.core.shellapp import (
38 from IPython.core.shellapp import (
39 InteractiveShellApp, shell_flags, shell_aliases
39 InteractiveShellApp, shell_flags, shell_aliases
40 )
40 )
41 from IPython.utils import io
41 from IPython.utils import io
42 from IPython.utils.localinterfaces import LOCALHOST
42 from IPython.utils.localinterfaces import localhost
43 from IPython.utils.path import filefind
43 from IPython.utils.path import filefind
44 from IPython.utils.py3compat import str_to_bytes
44 from IPython.utils.py3compat import str_to_bytes
45 from IPython.utils.traitlets import (
45 from IPython.utils.traitlets import (
46 Any, Instance, Dict, Unicode, Integer, Bool, CaselessStrEnum,
46 Any, Instance, Dict, Unicode, Integer, Bool, CaselessStrEnum,
47 DottedObjectName,
47 DottedObjectName,
48 )
48 )
49 from IPython.utils.importstring import import_item
49 from IPython.utils.importstring import import_item
50 from IPython.kernel import write_connection_file
50 from IPython.kernel import write_connection_file
51
51
52 # local imports
52 # local imports
53 from heartbeat import Heartbeat
53 from heartbeat import Heartbeat
54 from ipkernel import Kernel
54 from ipkernel import Kernel
55 from parentpoller import ParentPollerUnix, ParentPollerWindows
55 from parentpoller import ParentPollerUnix, ParentPollerWindows
56 from session import (
56 from session import (
57 Session, session_flags, session_aliases, default_secure,
57 Session, session_flags, session_aliases, default_secure,
58 )
58 )
59 from zmqshell import ZMQInteractiveShell
59 from zmqshell import ZMQInteractiveShell
60
60
61 #-----------------------------------------------------------------------------
61 #-----------------------------------------------------------------------------
62 # Flags and Aliases
62 # Flags and Aliases
63 #-----------------------------------------------------------------------------
63 #-----------------------------------------------------------------------------
64
64
65 kernel_aliases = dict(base_aliases)
65 kernel_aliases = dict(base_aliases)
66 kernel_aliases.update({
66 kernel_aliases.update({
67 'ip' : 'IPKernelApp.ip',
67 'ip' : 'IPKernelApp.ip',
68 'hb' : 'IPKernelApp.hb_port',
68 'hb' : 'IPKernelApp.hb_port',
69 'shell' : 'IPKernelApp.shell_port',
69 'shell' : 'IPKernelApp.shell_port',
70 'iopub' : 'IPKernelApp.iopub_port',
70 'iopub' : 'IPKernelApp.iopub_port',
71 'stdin' : 'IPKernelApp.stdin_port',
71 'stdin' : 'IPKernelApp.stdin_port',
72 'control' : 'IPKernelApp.control_port',
72 'control' : 'IPKernelApp.control_port',
73 'f' : 'IPKernelApp.connection_file',
73 'f' : 'IPKernelApp.connection_file',
74 'parent': 'IPKernelApp.parent_handle',
74 'parent': 'IPKernelApp.parent_handle',
75 'transport': 'IPKernelApp.transport',
75 'transport': 'IPKernelApp.transport',
76 })
76 })
77 if sys.platform.startswith('win'):
77 if sys.platform.startswith('win'):
78 kernel_aliases['interrupt'] = 'IPKernelApp.interrupt'
78 kernel_aliases['interrupt'] = 'IPKernelApp.interrupt'
79
79
80 kernel_flags = dict(base_flags)
80 kernel_flags = dict(base_flags)
81 kernel_flags.update({
81 kernel_flags.update({
82 'no-stdout' : (
82 'no-stdout' : (
83 {'IPKernelApp' : {'no_stdout' : True}},
83 {'IPKernelApp' : {'no_stdout' : True}},
84 "redirect stdout to the null device"),
84 "redirect stdout to the null device"),
85 'no-stderr' : (
85 'no-stderr' : (
86 {'IPKernelApp' : {'no_stderr' : True}},
86 {'IPKernelApp' : {'no_stderr' : True}},
87 "redirect stderr to the null device"),
87 "redirect stderr to the null device"),
88 'pylab' : (
88 'pylab' : (
89 {'IPKernelApp' : {'pylab' : 'auto'}},
89 {'IPKernelApp' : {'pylab' : 'auto'}},
90 """Pre-load matplotlib and numpy for interactive use with
90 """Pre-load matplotlib and numpy for interactive use with
91 the default matplotlib backend."""),
91 the default matplotlib backend."""),
92 })
92 })
93
93
94 # inherit flags&aliases for any IPython shell apps
94 # inherit flags&aliases for any IPython shell apps
95 kernel_aliases.update(shell_aliases)
95 kernel_aliases.update(shell_aliases)
96 kernel_flags.update(shell_flags)
96 kernel_flags.update(shell_flags)
97
97
98 # inherit flags&aliases for Sessions
98 # inherit flags&aliases for Sessions
99 kernel_aliases.update(session_aliases)
99 kernel_aliases.update(session_aliases)
100 kernel_flags.update(session_flags)
100 kernel_flags.update(session_flags)
101
101
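With the aliases above, all connection details can be set from the command line; an illustrative invocation (ports and path are placeholders):

    ipython kernel --ip=127.0.0.1 --shell=53794 --iopub=53795 \
        --stdin=53796 --hb=53797 --control=53798 -f /tmp/kernel-test.json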
102 _ctrl_c_message = """\
102 _ctrl_c_message = """\
103 NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
103 NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
104
104
105 To exit, you will have to explicitly quit this process, by either sending
105 To exit, you will have to explicitly quit this process, by either sending
106 "quit" from a client, or using Ctrl-\\ in UNIX-like environments.
106 "quit" from a client, or using Ctrl-\\ in UNIX-like environments.
107
107
108 To read more about this, see https://github.com/ipython/ipython/issues/2049
108 To read more about this, see https://github.com/ipython/ipython/issues/2049
109
109
110 """
110 """
111
111
112 #-----------------------------------------------------------------------------
112 #-----------------------------------------------------------------------------
113 # Application class for starting an IPython Kernel
113 # Application class for starting an IPython Kernel
114 #-----------------------------------------------------------------------------
114 #-----------------------------------------------------------------------------
115
115
116 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp):
116 class IPKernelApp(BaseIPythonApplication, InteractiveShellApp):
117 name='ipkernel'
117 name='ipkernel'
118 aliases = Dict(kernel_aliases)
118 aliases = Dict(kernel_aliases)
119 flags = Dict(kernel_flags)
119 flags = Dict(kernel_flags)
120 classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
120 classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session]
121 # the kernel class, as an importstring
121 # the kernel class, as an importstring
122 kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel', config=True,
122 kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel', config=True,
123 help="""The Kernel subclass to be used.
123 help="""The Kernel subclass to be used.
124
124
125 This should allow easy re-use of the IPKernelApp entry point
125 This should allow easy re-use of the IPKernelApp entry point
126 to configure and launch kernels other than IPython's own.
126 to configure and launch kernels other than IPython's own.
127 """)
127 """)
128 kernel = Any()
128 kernel = Any()
129 poller = Any() # don't restrict this even though current pollers are all Threads
129 poller = Any() # don't restrict this even though current pollers are all Threads
130 heartbeat = Instance(Heartbeat)
130 heartbeat = Instance(Heartbeat)
131 session = Instance('IPython.kernel.zmq.session.Session')
131 session = Instance('IPython.kernel.zmq.session.Session')
132 ports = Dict()
132 ports = Dict()
133
133
134 # ipkernel doesn't get its own config file
134 # ipkernel doesn't get its own config file
135 def _config_file_name_default(self):
135 def _config_file_name_default(self):
136 return 'ipython_config.py'
136 return 'ipython_config.py'
137
137
138 # inherit config file name from parent:
138 # inherit config file name from parent:
139 parent_appname = Unicode(config=True)
139 parent_appname = Unicode(config=True)
140 def _parent_appname_changed(self, name, old, new):
140 def _parent_appname_changed(self, name, old, new):
141 if self.config_file_specified:
141 if self.config_file_specified:
142 # it was manually specified, ignore
142 # it was manually specified, ignore
143 return
143 return
144 self.config_file_name = new.replace('-','_') + u'_config.py'
144 self.config_file_name = new.replace('-','_') + u'_config.py'
145 # don't let this count as specifying the config file
145 # don't let this count as specifying the config file
146 self.config_file_specified.remove(self.config_file_name)
146 self.config_file_specified.remove(self.config_file_name)
147
147
148 # connection info:
148 # connection info:
149 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
149 transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True)
150 ip = Unicode(config=True,
150 ip = Unicode(config=True,
151 help="Set the IP or interface on which the kernel will listen.")
151 help="Set the IP or interface on which the kernel will listen.")
152 def _ip_default(self):
152 def _ip_default(self):
153 if self.transport == 'ipc':
153 if self.transport == 'ipc':
154 if self.connection_file:
154 if self.connection_file:
155 return os.path.splitext(self.abs_connection_file)[0] + '-ipc'
155 return os.path.splitext(self.abs_connection_file)[0] + '-ipc'
156 else:
156 else:
157 return 'kernel-ipc'
157 return 'kernel-ipc'
158 else:
158 else:
159 return LOCALHOST
159 return localhost()
160
160 hb_port = Integer(0, config=True, help="set the heartbeat port [default: random]")
161 hb_port = Integer(0, config=True, help="set the heartbeat port [default: random]")
161 shell_port = Integer(0, config=True, help="set the shell (ROUTER) port [default: random]")
162 shell_port = Integer(0, config=True, help="set the shell (ROUTER) port [default: random]")
162 iopub_port = Integer(0, config=True, help="set the iopub (PUB) port [default: random]")
163 iopub_port = Integer(0, config=True, help="set the iopub (PUB) port [default: random]")
163 stdin_port = Integer(0, config=True, help="set the stdin (ROUTER) port [default: random]")
164 stdin_port = Integer(0, config=True, help="set the stdin (ROUTER) port [default: random]")
164 control_port = Integer(0, config=True, help="set the control (ROUTER) port [default: random]")
165 control_port = Integer(0, config=True, help="set the control (ROUTER) port [default: random]")
165 connection_file = Unicode('', config=True,
166 connection_file = Unicode('', config=True,
166 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
167 help="""JSON file in which to store connection info [default: kernel-<pid>.json]
167
168
168 This file will contain the IP, ports, and authentication key needed to connect
169 This file will contain the IP, ports, and authentication key needed to connect
169 clients to this kernel. By default, this file will be created in the security dir
170 clients to this kernel. By default, this file will be created in the security dir
170 of the current profile, but can be specified by absolute path.
171 of the current profile, but can be specified by absolute path.
171 """)
172 """)
172 @property
173 @property
173 def abs_connection_file(self):
174 def abs_connection_file(self):
174 if os.path.basename(self.connection_file) == self.connection_file:
175 if os.path.basename(self.connection_file) == self.connection_file:
175 return os.path.join(self.profile_dir.security_dir, self.connection_file)
176 return os.path.join(self.profile_dir.security_dir, self.connection_file)
176 else:
177 else:
177 return self.connection_file
178 return self.connection_file
178
179
179
180
180 # streams, etc.
181 # streams, etc.
181 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
182 no_stdout = Bool(False, config=True, help="redirect stdout to the null device")
182 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
183 no_stderr = Bool(False, config=True, help="redirect stderr to the null device")
183 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
184 outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream',
184 config=True, help="The importstring for the OutStream factory")
185 config=True, help="The importstring for the OutStream factory")
185 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
186 displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook',
186 config=True, help="The importstring for the DisplayHook factory")
187 config=True, help="The importstring for the DisplayHook factory")
187
188
188 # polling
189 # polling
189 parent_handle = Integer(0, config=True,
190 parent_handle = Integer(0, config=True,
190 help="""kill this process if its parent dies. On Windows, the argument
191 help="""kill this process if its parent dies. On Windows, the argument
191 specifies the HANDLE of the parent process, otherwise it is simply boolean.
192 specifies the HANDLE of the parent process, otherwise it is simply boolean.
192 """)
193 """)
193 interrupt = Integer(0, config=True,
194 interrupt = Integer(0, config=True,
194 help="""ONLY USED ON WINDOWS
195 help="""ONLY USED ON WINDOWS
195 Interrupt this process when the parent is signaled.
196 Interrupt this process when the parent is signaled.
196 """)
197 """)
197
198
198 def init_crash_handler(self):
199 def init_crash_handler(self):
199 # Install minimal exception handling
200 # Install minimal exception handling
200 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
201 sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor',
201 ostream=sys.__stdout__)
202 ostream=sys.__stdout__)
202
203
203 def init_poller(self):
204 def init_poller(self):
204 if sys.platform == 'win32':
205 if sys.platform == 'win32':
205 if self.interrupt or self.parent_handle:
206 if self.interrupt or self.parent_handle:
206 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
207 self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
207 elif self.parent_handle:
208 elif self.parent_handle:
208 self.poller = ParentPollerUnix()
209 self.poller = ParentPollerUnix()
209
210
210 def _bind_socket(self, s, port):
211 def _bind_socket(self, s, port):
211 iface = '%s://%s' % (self.transport, self.ip)
212 iface = '%s://%s' % (self.transport, self.ip)
212 if self.transport == 'tcp':
213 if self.transport == 'tcp':
213 if port <= 0:
214 if port <= 0:
214 port = s.bind_to_random_port(iface)
215 port = s.bind_to_random_port(iface)
215 else:
216 else:
216 s.bind("tcp://%s:%i" % (self.ip, port))
217 s.bind("tcp://%s:%i" % (self.ip, port))
217 elif self.transport == 'ipc':
218 elif self.transport == 'ipc':
218 if port <= 0:
219 if port <= 0:
219 port = 1
220 port = 1
220 path = "%s-%i" % (self.ip, port)
221 path = "%s-%i" % (self.ip, port)
221 while os.path.exists(path):
222 while os.path.exists(path):
222 port = port + 1
223 port = port + 1
223 path = "%s-%i" % (self.ip, port)
224 path = "%s-%i" % (self.ip, port)
224 else:
225 else:
225 path = "%s-%i" % (self.ip, port)
226 path = "%s-%i" % (self.ip, port)
226 s.bind("ipc://%s" % path)
227 s.bind("ipc://%s" % path)
227 return port
228 return port
228
229
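For tcp, a non-positive port above falls through to pyzmq's random binding; a standalone sketch:

    import zmq

    ctx = zmq.Context.instance()
    s = ctx.socket(zmq.ROUTER)
    port = s.bind_to_random_port("tcp://127.0.0.1")  # picks a free ephemeral port
    print("bound ROUTER socket on port", port)
    s.close()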
229 def load_connection_file(self):
230 def load_connection_file(self):
230 """load ip/port/hmac config from JSON connection file"""
231 """load ip/port/hmac config from JSON connection file"""
231 try:
232 try:
232 fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
233 fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
233 except IOError:
234 except IOError:
234 self.log.debug("Connection file not found: %s", self.connection_file)
235 self.log.debug("Connection file not found: %s", self.connection_file)
235 # This means I own it, so I will clean it up:
236 # This means I own it, so I will clean it up:
236 atexit.register(self.cleanup_connection_file)
237 atexit.register(self.cleanup_connection_file)
237 return
238 return
238 self.log.debug(u"Loading connection file %s", fname)
239 self.log.debug(u"Loading connection file %s", fname)
239 with open(fname) as f:
240 with open(fname) as f:
240 s = f.read()
241 s = f.read()
241 cfg = json.loads(s)
242 cfg = json.loads(s)
242 self.transport = cfg.get('transport', self.transport)
243 self.transport = cfg.get('transport', self.transport)
243 if self.ip == self._ip_default() and 'ip' in cfg:
244 if self.ip == self._ip_default() and 'ip' in cfg:
244 # not overridden by config or cl_args
245 # not overridden by config or cl_args
245 self.ip = cfg['ip']
246 self.ip = cfg['ip']
246 for channel in ('hb', 'shell', 'iopub', 'stdin', 'control'):
247 for channel in ('hb', 'shell', 'iopub', 'stdin', 'control'):
247 name = channel + '_port'
248 name = channel + '_port'
248 if getattr(self, name) == 0 and name in cfg:
249 if getattr(self, name) == 0 and name in cfg:
249 # not overridden by config or cl_args
250 # not overridden by config or cl_args
250 setattr(self, name, cfg[name])
251 setattr(self, name, cfg[name])
251 if 'key' in cfg:
252 if 'key' in cfg:
252 self.config.Session.key = str_to_bytes(cfg['key'])
253 self.config.Session.key = str_to_bytes(cfg['key'])
253
254
254 def write_connection_file(self):
255 def write_connection_file(self):
255 """write connection info to JSON file"""
256 """write connection info to JSON file"""
256 cf = self.abs_connection_file
257 cf = self.abs_connection_file
257 self.log.debug("Writing connection file: %s", cf)
258 self.log.debug("Writing connection file: %s", cf)
258 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
259 write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
259 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
260 shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
260 iopub_port=self.iopub_port, control_port=self.control_port)
261 iopub_port=self.iopub_port, control_port=self.control_port)
261
262
262 def cleanup_connection_file(self):
263 def cleanup_connection_file(self):
263 cf = self.abs_connection_file
264 cf = self.abs_connection_file
264 self.log.debug("Cleaning up connection file: %s", cf)
265 self.log.debug("Cleaning up connection file: %s", cf)
265 try:
266 try:
266 os.remove(cf)
267 os.remove(cf)
267 except (IOError, OSError):
268 except (IOError, OSError):
268 pass
269 pass
269
270
270 self.cleanup_ipc_files()
271 self.cleanup_ipc_files()
271
272
272 def cleanup_ipc_files(self):
273 def cleanup_ipc_files(self):
273 """cleanup ipc files if we wrote them"""
274 """cleanup ipc files if we wrote them"""
274 if self.transport != 'ipc':
275 if self.transport != 'ipc':
275 return
276 return
276 for port in (self.shell_port, self.iopub_port, self.stdin_port, self.hb_port, self.control_port):
277 for port in (self.shell_port, self.iopub_port, self.stdin_port, self.hb_port, self.control_port):
277 ipcfile = "%s-%i" % (self.ip, port)
278 ipcfile = "%s-%i" % (self.ip, port)
278 try:
279 try:
279 os.remove(ipcfile)
280 os.remove(ipcfile)
280 except (IOError, OSError):
281 except (IOError, OSError):
281 pass
282 pass
282
283
283 def init_connection_file(self):
284 def init_connection_file(self):
284 if not self.connection_file:
285 if not self.connection_file:
285 self.connection_file = "kernel-%s.json"%os.getpid()
286 self.connection_file = "kernel-%s.json"%os.getpid()
286 try:
287 try:
287 self.load_connection_file()
288 self.load_connection_file()
288 except Exception:
289 except Exception:
289 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
290 self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
290 self.exit(1)
291 self.exit(1)
291
292
292 def init_sockets(self):
293 def init_sockets(self):
293 # Create a context, a session, and the kernel sockets.
294 # Create a context, a session, and the kernel sockets.
294 self.log.info("Starting the kernel at pid: %i", os.getpid())
295 self.log.info("Starting the kernel at pid: %i", os.getpid())
295 context = zmq.Context.instance()
296 context = zmq.Context.instance()
296 # Uncomment this to try closing the context.
297 # Uncomment this to try closing the context.
297 # atexit.register(context.term)
298 # atexit.register(context.term)
298
299
299 self.shell_socket = context.socket(zmq.ROUTER)
300 self.shell_socket = context.socket(zmq.ROUTER)
300 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
301 self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
301 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
302 self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)
302
303
303 self.iopub_socket = context.socket(zmq.PUB)
304 self.iopub_socket = context.socket(zmq.PUB)
304 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
305 self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
305 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
306 self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
306
307
307 self.stdin_socket = context.socket(zmq.ROUTER)
308 self.stdin_socket = context.socket(zmq.ROUTER)
308 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
309 self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
309 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
310 self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)
310
311
311 self.control_socket = context.socket(zmq.ROUTER)
312 self.control_socket = context.socket(zmq.ROUTER)
312 self.control_port = self._bind_socket(self.control_socket, self.control_port)
313 self.control_port = self._bind_socket(self.control_socket, self.control_port)
313 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
314 self.log.debug("control ROUTER Channel on port: %i" % self.control_port)
314
315
315 def init_heartbeat(self):
316 def init_heartbeat(self):
316 """start the heart beating"""
317 """start the heart beating"""
317 # heartbeat doesn't share context, because it mustn't be blocked
318 # heartbeat doesn't share context, because it mustn't be blocked
318 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
319 # by the GIL, which is accessed by libzmq when freeing zero-copy messages
319 hb_ctx = zmq.Context()
320 hb_ctx = zmq.Context()
320 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
321 self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
321 self.hb_port = self.heartbeat.port
322 self.hb_port = self.heartbeat.port
322 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
323 self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
323 self.heartbeat.start()
324 self.heartbeat.start()
324
325
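The heartbeat is essentially an echo service that must keep answering even while the main thread holds the GIL, which is why it gets its own zmq.Context and thread. A rough sketch of the idea (not the actual Heartbeat class), assuming tcp and a hypothetical address:

    import threading
    import zmq

    def heartbeat_echo(addr='tcp://127.0.0.1:5678'):   # hypothetical address
        ctx = zmq.Context()          # dedicated context, not Context.instance()
        sock = ctx.socket(zmq.REP)
        sock.bind(addr)
        while True:
            sock.send(sock.recv())   # echo pings back unchanged

    t = threading.Thread(target=heartbeat_echo)
    t.daemon = True
    t.start()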
325 def log_connection_info(self):
326 def log_connection_info(self):
326 """display connection info, and store ports"""
327 """display connection info, and store ports"""
327 basename = os.path.basename(self.connection_file)
328 basename = os.path.basename(self.connection_file)
328 if basename == self.connection_file or \
329 if basename == self.connection_file or \
329 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
330 os.path.dirname(self.connection_file) == self.profile_dir.security_dir:
330 # use shortname
331 # use shortname
331 tail = basename
332 tail = basename
332 if self.profile != 'default':
333 if self.profile != 'default':
333 tail += " --profile %s" % self.profile
334 tail += " --profile %s" % self.profile
334 else:
335 else:
335 tail = self.connection_file
336 tail = self.connection_file
336 lines = [
337 lines = [
337 "To connect another client to this kernel, use:",
338 "To connect another client to this kernel, use:",
338 " --existing %s" % tail,
339 " --existing %s" % tail,
339 ]
340 ]
340 # log connection info
341 # log connection info
341 # info-level, so often not shown.
342 # info-level, so often not shown.
342 # frontends should use the %connect_info magic
343 # frontends should use the %connect_info magic
343 # to see the connection info
344 # to see the connection info
344 for line in lines:
345 for line in lines:
345 self.log.info(line)
346 self.log.info(line)
346 # also raw print to the terminal if no parent_handle (`ipython kernel`)
347 # also raw print to the terminal if no parent_handle (`ipython kernel`)
347 if not self.parent_handle:
348 if not self.parent_handle:
348 io.rprint(_ctrl_c_message)
349 io.rprint(_ctrl_c_message)
349 for line in lines:
350 for line in lines:
350 io.rprint(line)
351 io.rprint(line)
351
352
352 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
353 self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
353 stdin=self.stdin_port, hb=self.hb_port,
354 stdin=self.stdin_port, hb=self.hb_port,
354 control=self.control_port)
355 control=self.control_port)
355
356
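The --existing hint above has a programmatic equivalent: load the same connection file into a client. A minimal sketch, assuming a hypothetical file name in the profile's security dir:

    from IPython.kernel import find_connection_file
    from IPython.kernel.blocking import BlockingKernelClient

    cf = find_connection_file('kernel-1234.json')      # hypothetical file name
    client = BlockingKernelClient(connection_file=cf)
    client.load_connection_file()
    client.start_channels()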
356 def init_session(self):
357 def init_session(self):
357 """create our session object"""
358 """create our session object"""
358 default_secure(self.config)
359 default_secure(self.config)
359 self.session = Session(parent=self, username=u'kernel')
360 self.session = Session(parent=self, username=u'kernel')
360
361
361 def init_blackhole(self):
362 def init_blackhole(self):
362 """redirects stdout/stderr to devnull if necessary"""
363 """redirects stdout/stderr to devnull if necessary"""
363 if self.no_stdout or self.no_stderr:
364 if self.no_stdout or self.no_stderr:
364 blackhole = open(os.devnull, 'w')
365 blackhole = open(os.devnull, 'w')
365 if self.no_stdout:
366 if self.no_stdout:
366 sys.stdout = sys.__stdout__ = blackhole
367 sys.stdout = sys.__stdout__ = blackhole
367 if self.no_stderr:
368 if self.no_stderr:
368 sys.stderr = sys.__stderr__ = blackhole
369 sys.stderr = sys.__stderr__ = blackhole
369
370
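Note that sys.stdout and sys.__stdout__ are both pointed at the same devnull handle, so later code that tries to "restore" the real stream via sys.__stdout__ still writes to the blackhole. A small illustration:

    import os
    import sys

    blackhole = open(os.devnull, 'w')
    sys.stdout = sys.__stdout__ = blackhole
    print("silenced")               # goes to the blackhole
    sys.stdout = sys.__stdout__     # a naive restore changes nothing
    print("still silenced")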
370 def init_io(self):
371 def init_io(self):
371 """Redirect input streams and set a display hook."""
372 """Redirect input streams and set a display hook."""
372 if self.outstream_class:
373 if self.outstream_class:
373 outstream_factory = import_item(str(self.outstream_class))
374 outstream_factory = import_item(str(self.outstream_class))
374 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
375 sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout')
375 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
376 sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr')
376 if self.displayhook_class:
377 if self.displayhook_class:
377 displayhook_factory = import_item(str(self.displayhook_class))
378 displayhook_factory = import_item(str(self.displayhook_class))
378 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
379 sys.displayhook = displayhook_factory(self.session, self.iopub_socket)
379
380
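outstream_class and displayhook_class are import strings resolved with import_item, so any object matching the constructor signatures used above can be substituted. A hypothetical file-like stand-in, sketching the assumed stream interface:

    class EchoOutStream(object):
        """Hypothetical stand-in for an iopub-publishing stream."""
        def __init__(self, session, socket, name):
            self.session, self.socket, self.name = session, socket, name

        def write(self, text):
            # a real implementation would send a 'stream' message over iopub
            pass

        def flush(self):
            pass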
380 def init_signal(self):
381 def init_signal(self):
381 signal.signal(signal.SIGINT, signal.SIG_IGN)
382 signal.signal(signal.SIGINT, signal.SIG_IGN)
382
383
383 def init_kernel(self):
384 def init_kernel(self):
384 """Create the Kernel object itself"""
385 """Create the Kernel object itself"""
385 shell_stream = ZMQStream(self.shell_socket)
386 shell_stream = ZMQStream(self.shell_socket)
386 control_stream = ZMQStream(self.control_socket)
387 control_stream = ZMQStream(self.control_socket)
387
388
388 kernel_factory = import_item(str(self.kernel_class))
389 kernel_factory = import_item(str(self.kernel_class))
389
390
390 kernel = kernel_factory(parent=self, session=self.session,
391 kernel = kernel_factory(parent=self, session=self.session,
391 shell_streams=[shell_stream, control_stream],
392 shell_streams=[shell_stream, control_stream],
392 iopub_socket=self.iopub_socket,
393 iopub_socket=self.iopub_socket,
393 stdin_socket=self.stdin_socket,
394 stdin_socket=self.stdin_socket,
394 log=self.log,
395 log=self.log,
395 profile_dir=self.profile_dir,
396 profile_dir=self.profile_dir,
396 user_ns=self.user_ns,
397 user_ns=self.user_ns,
397 )
398 )
398 kernel.record_ports(self.ports)
399 kernel.record_ports(self.ports)
399 self.kernel = kernel
400 self.kernel = kernel
400
401
401 def init_gui_pylab(self):
402 def init_gui_pylab(self):
402 """Enable GUI event loop integration, taking pylab into account."""
403 """Enable GUI event loop integration, taking pylab into account."""
403
404
404 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
405 # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
405 # to ensure that any exception is printed straight to stderr.
406 # to ensure that any exception is printed straight to stderr.
406 # Normally _showtraceback associates the reply with an execution,
407 # Normally _showtraceback associates the reply with an execution,
407 # which means frontends will never draw it, as this exception
408 # which means frontends will never draw it, as this exception
408 # is not associated with any execute request.
409 # is not associated with any execute request.
409
410
410 shell = self.shell
411 shell = self.shell
411 _showtraceback = shell._showtraceback
412 _showtraceback = shell._showtraceback
412 try:
413 try:
413 # replace pyerr-sending traceback with stderr
414 # replace pyerr-sending traceback with stderr
414 def print_tb(etype, evalue, stb):
415 def print_tb(etype, evalue, stb):
415 print ("GUI event loop or pylab initialization failed",
416 print ("GUI event loop or pylab initialization failed",
416 file=io.stderr)
417 file=io.stderr)
417 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
418 print (shell.InteractiveTB.stb2text(stb), file=io.stderr)
418 shell._showtraceback = print_tb
419 shell._showtraceback = print_tb
419 InteractiveShellApp.init_gui_pylab(self)
420 InteractiveShellApp.init_gui_pylab(self)
420 finally:
421 finally:
421 shell._showtraceback = _showtraceback
422 shell._showtraceback = _showtraceback
422
423
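The swap-and-restore around init_gui_pylab is a general idiom: replace a callback for the duration of one call, and restore it in a finally block so the override cannot leak. A reusable sketch of the same idiom (not used by the code above):

    from contextlib import contextmanager

    @contextmanager
    def swapped(obj, name, replacement):
        old = getattr(obj, name)
        setattr(obj, name, replacement)
        try:
            yield
        finally:
            setattr(obj, name, old)    # restored even if the body raises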
423 def init_shell(self):
424 def init_shell(self):
424 self.shell = self.kernel.shell
425 self.shell = self.kernel.shell
425 self.shell.configurables.append(self)
426 self.shell.configurables.append(self)
426
427
427 @catch_config_error
428 @catch_config_error
428 def initialize(self, argv=None):
429 def initialize(self, argv=None):
429 super(IPKernelApp, self).initialize(argv)
430 super(IPKernelApp, self).initialize(argv)
430 self.init_blackhole()
431 self.init_blackhole()
431 self.init_connection_file()
432 self.init_connection_file()
432 self.init_session()
433 self.init_session()
433 self.init_poller()
434 self.init_poller()
434 self.init_sockets()
435 self.init_sockets()
435 self.init_heartbeat()
436 self.init_heartbeat()
436 # writing/displaying connection info must be *after* init_sockets/heartbeat
437 # writing/displaying connection info must be *after* init_sockets/heartbeat
437 self.log_connection_info()
438 self.log_connection_info()
438 self.write_connection_file()
439 self.write_connection_file()
439 self.init_io()
440 self.init_io()
440 self.init_signal()
441 self.init_signal()
441 self.init_kernel()
442 self.init_kernel()
442 # shell init steps
443 # shell init steps
443 self.init_path()
444 self.init_path()
444 self.init_shell()
445 self.init_shell()
445 self.init_gui_pylab()
446 self.init_gui_pylab()
446 self.init_extensions()
447 self.init_extensions()
447 self.init_code()
448 self.init_code()
448 # flush stdout/stderr, so that anything written to these streams during
449 # flush stdout/stderr, so that anything written to these streams during
449 # initialization does not get associated with the first execution request
450 # initialization does not get associated with the first execution request
450 sys.stdout.flush()
451 sys.stdout.flush()
451 sys.stderr.flush()
452 sys.stderr.flush()
452
453
453 def start(self):
454 def start(self):
454 if self.poller is not None:
455 if self.poller is not None:
455 self.poller.start()
456 self.poller.start()
456 self.kernel.start()
457 self.kernel.start()
457 try:
458 try:
458 ioloop.IOLoop.instance().start()
459 ioloop.IOLoop.instance().start()
459 except KeyboardInterrupt:
460 except KeyboardInterrupt:
460 pass
461 pass
461
462
462 launch_new_instance = IPKernelApp.launch_instance
463 launch_new_instance = IPKernelApp.launch_instance
463
464
464 def main():
465 def main():
465 """Run an IPKernel as an application"""
466 """Run an IPKernel as an application"""
466 app = IPKernelApp.instance()
467 app = IPKernelApp.instance()
467 app.initialize()
468 app.initialize()
468 app.start()
469 app.start()
469
470
470
471
471 if __name__ == '__main__':
472 if __name__ == '__main__':
472 main()
473 main()
@@ -1,547 +1,547 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 # encoding: utf-8
2 # encoding: utf-8
3 """
3 """
4 The IPython controller application.
4 The IPython controller application.
5
5
6 Authors:
6 Authors:
7
7
8 * Brian Granger
8 * Brian Granger
9 * MinRK
9 * MinRK
10
10
11 """
11 """
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Copyright (C) 2008-2011 The IPython Development Team
14 # Copyright (C) 2008 The IPython Development Team
15 #
15 #
16 # Distributed under the terms of the BSD License. The full license is in
16 # Distributed under the terms of the BSD License. The full license is in
17 # the file COPYING, distributed as part of this software.
17 # the file COPYING, distributed as part of this software.
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19
19
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21 # Imports
21 # Imports
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23
23
24 from __future__ import with_statement
24 from __future__ import with_statement
25
25
26 import json
26 import json
27 import os
27 import os
28 import stat
28 import stat
29 import sys
29 import sys
30
30
31 from multiprocessing import Process
31 from multiprocessing import Process
32 from signal import signal, SIGINT, SIGABRT, SIGTERM
32 from signal import signal, SIGINT, SIGABRT, SIGTERM
33
33
34 import zmq
34 import zmq
35 from zmq.devices import ProcessMonitoredQueue
35 from zmq.devices import ProcessMonitoredQueue
36 from zmq.log.handlers import PUBHandler
36 from zmq.log.handlers import PUBHandler
37
37
38 from IPython.core.profiledir import ProfileDir
38 from IPython.core.profiledir import ProfileDir
39
39
40 from IPython.parallel.apps.baseapp import (
40 from IPython.parallel.apps.baseapp import (
41 BaseParallelApplication,
41 BaseParallelApplication,
42 base_aliases,
42 base_aliases,
43 base_flags,
43 base_flags,
44 catch_config_error,
44 catch_config_error,
45 )
45 )
46 from IPython.utils.importstring import import_item
46 from IPython.utils.importstring import import_item
47 from IPython.utils.localinterfaces import LOCALHOST, PUBLIC_IPS
47 from IPython.utils.localinterfaces import localhost, public_ips
48 from IPython.utils.traitlets import Instance, Unicode, Bool, List, Dict, TraitError
48 from IPython.utils.traitlets import Instance, Unicode, Bool, List, Dict, TraitError
49
49
50 from IPython.kernel.zmq.session import (
50 from IPython.kernel.zmq.session import (
51 Session, session_aliases, session_flags, default_secure
51 Session, session_aliases, session_flags, default_secure
52 )
52 )
53
53
54 from IPython.parallel.controller.heartmonitor import HeartMonitor
54 from IPython.parallel.controller.heartmonitor import HeartMonitor
55 from IPython.parallel.controller.hub import HubFactory
55 from IPython.parallel.controller.hub import HubFactory
56 from IPython.parallel.controller.scheduler import TaskScheduler,launch_scheduler
56 from IPython.parallel.controller.scheduler import TaskScheduler,launch_scheduler
57 from IPython.parallel.controller.dictdb import DictDB
57 from IPython.parallel.controller.dictdb import DictDB
58
58
59 from IPython.parallel.util import split_url, disambiguate_url, set_hwm
59 from IPython.parallel.util import split_url, disambiguate_url, set_hwm
60
60
61 # conditional import of SQLiteDB / MongoDB backend class
61 # conditional import of SQLiteDB / MongoDB backend class
62 real_dbs = []
62 real_dbs = []
63
63
64 try:
64 try:
65 from IPython.parallel.controller.sqlitedb import SQLiteDB
65 from IPython.parallel.controller.sqlitedb import SQLiteDB
66 except ImportError:
66 except ImportError:
67 pass
67 pass
68 else:
68 else:
69 real_dbs.append(SQLiteDB)
69 real_dbs.append(SQLiteDB)
70
70
71 try:
71 try:
72 from IPython.parallel.controller.mongodb import MongoDB
72 from IPython.parallel.controller.mongodb import MongoDB
73 except ImportError:
73 except ImportError:
74 pass
74 pass
75 else:
75 else:
76 real_dbs.append(MongoDB)
76 real_dbs.append(MongoDB)
77
77
78
78
79
79
80 #-----------------------------------------------------------------------------
80 #-----------------------------------------------------------------------------
81 # Module level variables
81 # Module level variables
82 #-----------------------------------------------------------------------------
82 #-----------------------------------------------------------------------------
83
83
84
84
85 _description = """Start the IPython controller for parallel computing.
85 _description = """Start the IPython controller for parallel computing.
86
86
87 The IPython controller provides a gateway between the IPython engines and
87 The IPython controller provides a gateway between the IPython engines and
88 clients. The controller needs to be started before the engines and can be
88 clients. The controller needs to be started before the engines and can be
89 configured using command line options or using a cluster directory. Cluster
89 configured using command line options or using a cluster directory. Cluster
90 directories contain config, log and security files and are usually located in
90 directories contain config, log and security files and are usually located in
91 your ipython directory and named as "profile_name". See the `profile`
91 your ipython directory and named as "profile_name". See the `profile`
92 and `profile-dir` options for details.
92 and `profile-dir` options for details.
93 """
93 """
94
94
95 _examples = """
95 _examples = """
96 ipcontroller --ip=192.168.0.1 --port=1000 # listen on ip, port for engines
96 ipcontroller --ip=192.168.0.1 --port=1000 # listen on ip, port for engines
97 ipcontroller --scheme=pure # use the pure zeromq scheduler
97 ipcontroller --scheme=pure # use the pure zeromq scheduler
98 """
98 """
99
99
100
100
101 #-----------------------------------------------------------------------------
101 #-----------------------------------------------------------------------------
102 # The main application
102 # The main application
103 #-----------------------------------------------------------------------------
103 #-----------------------------------------------------------------------------
104 flags = {}
104 flags = {}
105 flags.update(base_flags)
105 flags.update(base_flags)
106 flags.update({
106 flags.update({
107 'usethreads' : ( {'IPControllerApp' : {'use_threads' : True}},
107 'usethreads' : ( {'IPControllerApp' : {'use_threads' : True}},
108 'Use threads instead of processes for the schedulers'),
108 'Use threads instead of processes for the schedulers'),
109 'sqlitedb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.sqlitedb.SQLiteDB'}},
109 'sqlitedb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.sqlitedb.SQLiteDB'}},
110 'use the SQLiteDB backend'),
110 'use the SQLiteDB backend'),
111 'mongodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.mongodb.MongoDB'}},
111 'mongodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.mongodb.MongoDB'}},
112 'use the MongoDB backend'),
112 'use the MongoDB backend'),
113 'dictdb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.DictDB'}},
113 'dictdb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.DictDB'}},
114 'use the in-memory DictDB backend'),
114 'use the in-memory DictDB backend'),
115 'nodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.NoDB'}},
115 'nodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.NoDB'}},
116 """use dummy DB backend, which doesn't store any information.
116 """use dummy DB backend, which doesn't store any information.
117
117
118 This is the default as of IPython 0.13.
118 This is the default as of IPython 0.13.
119
119
120 To enable delayed or repeated retrieval of results from the Hub,
120 To enable delayed or repeated retrieval of results from the Hub,
121 select one of the true db backends.
121 select one of the true db backends.
122 """),
122 """),
123 'reuse' : ({'IPControllerApp' : {'reuse_files' : True}},
123 'reuse' : ({'IPControllerApp' : {'reuse_files' : True}},
124 'reuse existing json connection files'),
124 'reuse existing json connection files'),
125 'restore' : ({'IPControllerApp' : {'restore_engines' : True, 'reuse_files' : True}},
125 'restore' : ({'IPControllerApp' : {'restore_engines' : True, 'reuse_files' : True}},
126 'Attempt to restore engines from a JSON file. '
126 'Attempt to restore engines from a JSON file. '
127 'For use when resuming a crashed controller'),
127 'For use when resuming a crashed controller'),
128 })
128 })
129
129
130 flags.update(session_flags)
130 flags.update(session_flags)
131
131
132 aliases = dict(
132 aliases = dict(
133 ssh = 'IPControllerApp.ssh_server',
133 ssh = 'IPControllerApp.ssh_server',
134 enginessh = 'IPControllerApp.engine_ssh_server',
134 enginessh = 'IPControllerApp.engine_ssh_server',
135 location = 'IPControllerApp.location',
135 location = 'IPControllerApp.location',
136
136
137 url = 'HubFactory.url',
137 url = 'HubFactory.url',
138 ip = 'HubFactory.ip',
138 ip = 'HubFactory.ip',
139 transport = 'HubFactory.transport',
139 transport = 'HubFactory.transport',
140 port = 'HubFactory.regport',
140 port = 'HubFactory.regport',
141
141
142 ping = 'HeartMonitor.period',
142 ping = 'HeartMonitor.period',
143
143
144 scheme = 'TaskScheduler.scheme_name',
144 scheme = 'TaskScheduler.scheme_name',
145 hwm = 'TaskScheduler.hwm',
145 hwm = 'TaskScheduler.hwm',
146 )
146 )
147 aliases.update(base_aliases)
147 aliases.update(base_aliases)
148 aliases.update(session_aliases)
148 aliases.update(session_aliases)
149
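Each alias maps a command-line option onto a configurable trait, so flags and config files are interchangeable. For instance (a sketch, with hypothetical values):

    # ipcontroller --ip=10.0.0.1 --port=10101 --scheme=lru
    # is equivalent to a config file containing:
    # c.HubFactory.ip = u'10.0.0.1'
    # c.HubFactory.regport = 10101
    # c.TaskScheduler.scheme_name = 'lru'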
149
150 class IPControllerApp(BaseParallelApplication):
150 class IPControllerApp(BaseParallelApplication):
151
151
152 name = u'ipcontroller'
152 name = u'ipcontroller'
153 description = _description
153 description = _description
154 examples = _examples
154 examples = _examples
155 classes = [ProfileDir, Session, HubFactory, TaskScheduler, HeartMonitor, DictDB] + real_dbs
155 classes = [ProfileDir, Session, HubFactory, TaskScheduler, HeartMonitor, DictDB] + real_dbs
156
156
157 # change default to True
157 # change default to True
158 auto_create = Bool(True, config=True,
158 auto_create = Bool(True, config=True,
159 help="""Whether to create profile dir if it doesn't exist.""")
159 help="""Whether to create profile dir if it doesn't exist.""")
160
160
161 reuse_files = Bool(False, config=True,
161 reuse_files = Bool(False, config=True,
162 help="""Whether to reuse existing json connection files.
162 help="""Whether to reuse existing json connection files.
163 If False, connection files will be removed on a clean exit.
163 If False, connection files will be removed on a clean exit.
164 """
164 """
165 )
165 )
166 restore_engines = Bool(False, config=True,
166 restore_engines = Bool(False, config=True,
167 help="""Reload engine state from JSON file
167 help="""Reload engine state from JSON file
168 """
168 """
169 )
169 )
170 ssh_server = Unicode(u'', config=True,
170 ssh_server = Unicode(u'', config=True,
171 help="""ssh url for clients to use when connecting to the Controller
171 help="""ssh url for clients to use when connecting to the Controller
172 processes. It should be of the form: [user@]server[:port]. The
172 processes. It should be of the form: [user@]server[:port]. The
173 Controller's listening addresses must be accessible from the ssh server""",
173 Controller's listening addresses must be accessible from the ssh server""",
174 )
174 )
175 engine_ssh_server = Unicode(u'', config=True,
175 engine_ssh_server = Unicode(u'', config=True,
176 help="""ssh url for engines to use when connecting to the Controller
176 help="""ssh url for engines to use when connecting to the Controller
177 processes. It should be of the form: [user@]server[:port]. The
177 processes. It should be of the form: [user@]server[:port]. The
178 Controller's listening addresses must be accessible from the ssh server""",
178 Controller's listening addresses must be accessible from the ssh server""",
179 )
179 )
180 location = Unicode(u'', config=True,
180 location = Unicode(u'', config=True,
181 help="""The external IP or domain name of the Controller, used for disambiguating
181 help="""The external IP or domain name of the Controller, used for disambiguating
182 engine and client connections.""",
182 engine and client connections.""",
183 )
183 )
184 import_statements = List([], config=True,
184 import_statements = List([], config=True,
185 help="import statements to be run at startup. Necessary in some environments"
185 help="import statements to be run at startup. Necessary in some environments"
186 )
186 )
187
187
188 use_threads = Bool(False, config=True,
188 use_threads = Bool(False, config=True,
189 help='Use threads instead of processes for the schedulers',
189 help='Use threads instead of processes for the schedulers',
190 )
190 )
191
191
192 engine_json_file = Unicode('ipcontroller-engine.json', config=True,
192 engine_json_file = Unicode('ipcontroller-engine.json', config=True,
193 help="JSON filename where engine connection info will be stored.")
193 help="JSON filename where engine connection info will be stored.")
194 client_json_file = Unicode('ipcontroller-client.json', config=True,
194 client_json_file = Unicode('ipcontroller-client.json', config=True,
195 help="JSON filename where client connection info will be stored.")
195 help="JSON filename where client connection info will be stored.")
196
196
197 def _cluster_id_changed(self, name, old, new):
197 def _cluster_id_changed(self, name, old, new):
198 super(IPControllerApp, self)._cluster_id_changed(name, old, new)
198 super(IPControllerApp, self)._cluster_id_changed(name, old, new)
199 self.engine_json_file = "%s-engine.json" % self.name
199 self.engine_json_file = "%s-engine.json" % self.name
200 self.client_json_file = "%s-client.json" % self.name
200 self.client_json_file = "%s-client.json" % self.name
201
201
202
202
203 # internal
203 # internal
204 children = List()
204 children = List()
205 mq_class = Unicode('zmq.devices.ProcessMonitoredQueue')
205 mq_class = Unicode('zmq.devices.ProcessMonitoredQueue')
206
206
207 def _use_threads_changed(self, name, old, new):
207 def _use_threads_changed(self, name, old, new):
208 self.mq_class = 'zmq.devices.%sMonitoredQueue'%('Thread' if new else 'Process')
208 self.mq_class = 'zmq.devices.%sMonitoredQueue'%('Thread' if new else 'Process')
209
209
210 write_connection_files = Bool(True,
210 write_connection_files = Bool(True,
211 help="""Whether to write connection files to disk.
211 help="""Whether to write connection files to disk.
212 True in all cases other than runs with `reuse_files=True` *after the first*
212 True in all cases other than runs with `reuse_files=True` *after the first*
213 """
213 """
214 )
214 )
215
215
216 aliases = Dict(aliases)
216 aliases = Dict(aliases)
217 flags = Dict(flags)
217 flags = Dict(flags)
218
218
219
219
220 def save_connection_dict(self, fname, cdict):
220 def save_connection_dict(self, fname, cdict):
221 """save a connection dict to json file."""
221 """save a connection dict to json file."""
222 c = self.config
222 c = self.config
223 url = cdict['registration']
223 url = cdict['registration']
224 location = cdict['location']
224 location = cdict['location']
225
225
226 if not location:
226 if not location:
227 if PUBLIC_IPS:
227 if public_ips():
228 location = PUBLIC_IPS[-1]
228 location = public_ips()[-1]
229 else:
229 else:
230 self.log.warn("Could not identify this machine's IP, assuming %s."
230 self.log.warn("Could not identify this machine's IP, assuming %s."
231 " You may need to specify '--location=<external_ip_address>' to help"
231 " You may need to specify '--location=<external_ip_address>' to help"
232 " IPython decide when to connect via loopback." % LOCALHOST)
232 " IPython decide when to connect via loopback." % localhost() )
233 location = LOCALHOST
233 location = localhost()
234 cdict['location'] = location
234 cdict['location'] = location
235 fname = os.path.join(self.profile_dir.security_dir, fname)
235 fname = os.path.join(self.profile_dir.security_dir, fname)
236 self.log.info("writing connection info to %s", fname)
236 self.log.info("writing connection info to %s", fname)
237 with open(fname, 'w') as f:
237 with open(fname, 'w') as f:
238 f.write(json.dumps(cdict, indent=2))
238 f.write(json.dumps(cdict, indent=2))
239 os.chmod(fname, stat.S_IRUSR|stat.S_IWUSR)
239 os.chmod(fname, stat.S_IRUSR|stat.S_IWUSR)
240
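The result is a plain JSON file readable only by its owner. A sketch of the write path, with purely hypothetical contents:

    import json
    import os
    import stat

    cdict = {                                  # hypothetical connection dict
        'registration': 12345,
        'location': '192.168.0.1',
        'ssh': '',
        'key': 'a-signing-key',
        'pack': 'json',
        'unpack': 'json',
        'signature_scheme': 'hmac-sha256',
    }
    fname = 'ipcontroller-client.json'
    with open(fname, 'w') as f:
        f.write(json.dumps(cdict, indent=2))
    os.chmod(fname, stat.S_IRUSR | stat.S_IWUSR)   # 0600: owner read/write only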
240
241 def load_config_from_json(self):
241 def load_config_from_json(self):
242 """load config from existing json connector files."""
242 """load config from existing json connector files."""
243 c = self.config
243 c = self.config
244 self.log.debug("loading config from JSON")
244 self.log.debug("loading config from JSON")
245
245
246 # load engine config
246 # load engine config
247
247
248 fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
248 fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
249 self.log.info("loading connection info from %s", fname)
249 self.log.info("loading connection info from %s", fname)
250 with open(fname) as f:
250 with open(fname) as f:
251 ecfg = json.loads(f.read())
251 ecfg = json.loads(f.read())
252
252
253 # json gives unicode, Session.key wants bytes
253 # json gives unicode, Session.key wants bytes
254 c.Session.key = ecfg['key'].encode('ascii')
254 c.Session.key = ecfg['key'].encode('ascii')
255
255
256 xport,ip = ecfg['interface'].split('://')
256 xport,ip = ecfg['interface'].split('://')
257
257
258 c.HubFactory.engine_ip = ip
258 c.HubFactory.engine_ip = ip
259 c.HubFactory.engine_transport = xport
259 c.HubFactory.engine_transport = xport
260
260
261 self.location = ecfg['location']
261 self.location = ecfg['location']
262 if not self.engine_ssh_server:
262 if not self.engine_ssh_server:
263 self.engine_ssh_server = ecfg['ssh']
263 self.engine_ssh_server = ecfg['ssh']
264
264
265 # load client config
265 # load client config
266
266
267 fname = os.path.join(self.profile_dir.security_dir, self.client_json_file)
267 fname = os.path.join(self.profile_dir.security_dir, self.client_json_file)
268 self.log.info("loading connection info from %s", fname)
268 self.log.info("loading connection info from %s", fname)
269 with open(fname) as f:
269 with open(fname) as f:
270 ccfg = json.loads(f.read())
270 ccfg = json.loads(f.read())
271
271
272 for key in ('key', 'registration', 'pack', 'unpack', 'signature_scheme'):
272 for key in ('key', 'registration', 'pack', 'unpack', 'signature_scheme'):
273 assert ccfg[key] == ecfg[key], "mismatch between engine and client info: %r" % key
273 assert ccfg[key] == ecfg[key], "mismatch between engine and client info: %r" % key
274
274
275 xport,addr = ccfg['interface'].split('://')
275 xport,addr = ccfg['interface'].split('://')
276
276
277 c.HubFactory.client_transport = xport
277 c.HubFactory.client_transport = xport
278 c.HubFactory.client_ip = addr
278 c.HubFactory.client_ip = addr
279 if not self.ssh_server:
279 if not self.ssh_server:
280 self.ssh_server = ccfg['ssh']
280 self.ssh_server = ccfg['ssh']
281
281
282 # load port config:
282 # load port config:
283 c.HubFactory.regport = ecfg['registration']
283 c.HubFactory.regport = ecfg['registration']
284 c.HubFactory.hb = (ecfg['hb_ping'], ecfg['hb_pong'])
284 c.HubFactory.hb = (ecfg['hb_ping'], ecfg['hb_pong'])
285 c.HubFactory.control = (ccfg['control'], ecfg['control'])
285 c.HubFactory.control = (ccfg['control'], ecfg['control'])
286 c.HubFactory.mux = (ccfg['mux'], ecfg['mux'])
286 c.HubFactory.mux = (ccfg['mux'], ecfg['mux'])
287 c.HubFactory.task = (ccfg['task'], ecfg['task'])
287 c.HubFactory.task = (ccfg['task'], ecfg['task'])
288 c.HubFactory.iopub = (ccfg['iopub'], ecfg['iopub'])
288 c.HubFactory.iopub = (ccfg['iopub'], ecfg['iopub'])
289 c.HubFactory.notifier_port = ccfg['notification']
289 c.HubFactory.notifier_port = ccfg['notification']
290
290
291 def cleanup_connection_files(self):
291 def cleanup_connection_files(self):
292 if self.reuse_files:
292 if self.reuse_files:
293 self.log.debug("leaving JSON connection files for reuse")
293 self.log.debug("leaving JSON connection files for reuse")
294 return
294 return
295 self.log.debug("cleaning up JSON connection files")
295 self.log.debug("cleaning up JSON connection files")
296 for f in (self.client_json_file, self.engine_json_file):
296 for f in (self.client_json_file, self.engine_json_file):
297 f = os.path.join(self.profile_dir.security_dir, f)
297 f = os.path.join(self.profile_dir.security_dir, f)
298 try:
298 try:
299 os.remove(f)
299 os.remove(f)
300 except Exception as e:
300 except Exception as e:
301 self.log.error("Failed to cleanup connection file: %s", e)
301 self.log.error("Failed to cleanup connection file: %s", e)
302 else:
302 else:
303 self.log.debug(u"removed %s", f)
303 self.log.debug(u"removed %s", f)
304
304
305 def load_secondary_config(self):
305 def load_secondary_config(self):
306 """secondary config, loading from JSON and setting defaults"""
306 """secondary config, loading from JSON and setting defaults"""
307 if self.reuse_files:
307 if self.reuse_files:
308 try:
308 try:
309 self.load_config_from_json()
309 self.load_config_from_json()
310 except (AssertionError,IOError) as e:
310 except (AssertionError,IOError) as e:
311 self.log.error("Could not load config from JSON: %s" % e)
311 self.log.error("Could not load config from JSON: %s" % e)
312 else:
312 else:
313 # successfully loaded config from JSON, and reuse=True
313 # successfully loaded config from JSON, and reuse=True
314 # no need to write back the same file
314 # no need to write back the same file
315 self.write_connection_files = False
315 self.write_connection_files = False
316
316
317 # switch Session.key default to secure
317 # switch Session.key default to secure
318 default_secure(self.config)
318 default_secure(self.config)
319 self.log.debug("Config changed")
319 self.log.debug("Config changed")
320 self.log.debug(repr(self.config))
320 self.log.debug(repr(self.config))
321
321
322 def init_hub(self):
322 def init_hub(self):
323 c = self.config
323 c = self.config
324
324
325 self.do_import_statements()
325 self.do_import_statements()
326
326
327 try:
327 try:
328 self.factory = HubFactory(config=c, log=self.log)
328 self.factory = HubFactory(config=c, log=self.log)
329 # self.start_logging()
329 # self.start_logging()
330 self.factory.init_hub()
330 self.factory.init_hub()
331 except TraitError:
331 except TraitError:
332 raise
332 raise
333 except Exception:
333 except Exception:
334 self.log.error("Couldn't construct the Controller", exc_info=True)
334 self.log.error("Couldn't construct the Controller", exc_info=True)
335 self.exit(1)
335 self.exit(1)
336
336
337 if self.write_connection_files:
337 if self.write_connection_files:
338 # save to new json config files
338 # save to new json config files
339 f = self.factory
339 f = self.factory
340 base = {
340 base = {
341 'key' : f.session.key.decode('ascii'),
341 'key' : f.session.key.decode('ascii'),
342 'location' : self.location,
342 'location' : self.location,
343 'pack' : f.session.packer,
343 'pack' : f.session.packer,
344 'unpack' : f.session.unpacker,
344 'unpack' : f.session.unpacker,
345 'signature_scheme' : f.session.signature_scheme,
345 'signature_scheme' : f.session.signature_scheme,
346 }
346 }
347
347
348 cdict = {'ssh' : self.ssh_server}
348 cdict = {'ssh' : self.ssh_server}
349 cdict.update(f.client_info)
349 cdict.update(f.client_info)
350 cdict.update(base)
350 cdict.update(base)
351 self.save_connection_dict(self.client_json_file, cdict)
351 self.save_connection_dict(self.client_json_file, cdict)
352
352
353 edict = {'ssh' : self.engine_ssh_server}
353 edict = {'ssh' : self.engine_ssh_server}
354 edict.update(f.engine_info)
354 edict.update(f.engine_info)
355 edict.update(base)
355 edict.update(base)
356 self.save_connection_dict(self.engine_json_file, edict)
356 self.save_connection_dict(self.engine_json_file, edict)
357
357
358 fname = "engines%s.json" % self.cluster_id
358 fname = "engines%s.json" % self.cluster_id
359 self.factory.hub.engine_state_file = os.path.join(self.profile_dir.log_dir, fname)
359 self.factory.hub.engine_state_file = os.path.join(self.profile_dir.log_dir, fname)
360 if self.restore_engines:
360 if self.restore_engines:
361 self.factory.hub._load_engine_state()
361 self.factory.hub._load_engine_state()
362
362
363 def init_schedulers(self):
363 def init_schedulers(self):
364 children = self.children
364 children = self.children
365 mq = import_item(str(self.mq_class))
365 mq = import_item(str(self.mq_class))
366
366
367 f = self.factory
367 f = self.factory
368 ident = f.session.bsession
368 ident = f.session.bsession
369 # disambiguate url, in case of *
369 # disambiguate url, in case of *
370 monitor_url = disambiguate_url(f.monitor_url)
370 monitor_url = disambiguate_url(f.monitor_url)
371 # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
371 # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
372 # IOPub relay (in a Process)
372 # IOPub relay (in a Process)
373 q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A',b'iopub')
373 q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A',b'iopub')
374 q.bind_in(f.client_url('iopub'))
374 q.bind_in(f.client_url('iopub'))
375 q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
375 q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
376 q.bind_out(f.engine_url('iopub'))
376 q.bind_out(f.engine_url('iopub'))
377 q.setsockopt_out(zmq.SUBSCRIBE, b'')
377 q.setsockopt_out(zmq.SUBSCRIBE, b'')
378 q.connect_mon(monitor_url)
378 q.connect_mon(monitor_url)
379 q.daemon=True
379 q.daemon=True
380 children.append(q)
380 children.append(q)
381
381
382 # Multiplexer Queue (in a Process)
382 # Multiplexer Queue (in a Process)
383 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
383 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
384
384
385 q.bind_in(f.client_url('mux'))
385 q.bind_in(f.client_url('mux'))
386 q.setsockopt_in(zmq.IDENTITY, b'mux_in')
386 q.setsockopt_in(zmq.IDENTITY, b'mux_in')
387 q.bind_out(f.engine_url('mux'))
387 q.bind_out(f.engine_url('mux'))
388 q.setsockopt_out(zmq.IDENTITY, b'mux_out')
388 q.setsockopt_out(zmq.IDENTITY, b'mux_out')
389 q.connect_mon(monitor_url)
389 q.connect_mon(monitor_url)
390 q.daemon=True
390 q.daemon=True
391 children.append(q)
391 children.append(q)
392
392
393 # Control Queue (in a Process)
393 # Control Queue (in a Process)
394 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
394 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
395 q.bind_in(f.client_url('control'))
395 q.bind_in(f.client_url('control'))
396 q.setsockopt_in(zmq.IDENTITY, b'control_in')
396 q.setsockopt_in(zmq.IDENTITY, b'control_in')
397 q.bind_out(f.engine_url('control'))
397 q.bind_out(f.engine_url('control'))
398 q.setsockopt_out(zmq.IDENTITY, b'control_out')
398 q.setsockopt_out(zmq.IDENTITY, b'control_out')
399 q.connect_mon(monitor_url)
399 q.connect_mon(monitor_url)
400 q.daemon=True
400 q.daemon=True
401 children.append(q)
401 children.append(q)
402 try:
402 try:
403 scheme = self.config.TaskScheduler.scheme_name
403 scheme = self.config.TaskScheduler.scheme_name
404 except AttributeError:
404 except AttributeError:
405 scheme = TaskScheduler.scheme_name.get_default_value()
405 scheme = TaskScheduler.scheme_name.get_default_value()
406 # Task Queue (in a Process)
406 # Task Queue (in a Process)
407 if scheme == 'pure':
407 if scheme == 'pure':
408 self.log.warn("task::using pure DEALER Task scheduler")
408 self.log.warn("task::using pure DEALER Task scheduler")
409 q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
409 q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
410 # q.setsockopt_out(zmq.HWM, hub.hwm)
410 # q.setsockopt_out(zmq.HWM, hub.hwm)
411 q.bind_in(f.client_url('task'))
411 q.bind_in(f.client_url('task'))
412 q.setsockopt_in(zmq.IDENTITY, b'task_in')
412 q.setsockopt_in(zmq.IDENTITY, b'task_in')
413 q.bind_out(f.engine_url('task'))
413 q.bind_out(f.engine_url('task'))
414 q.setsockopt_out(zmq.IDENTITY, b'task_out')
414 q.setsockopt_out(zmq.IDENTITY, b'task_out')
415 q.connect_mon(monitor_url)
415 q.connect_mon(monitor_url)
416 q.daemon=True
416 q.daemon=True
417 children.append(q)
417 children.append(q)
418 elif scheme == 'none':
418 elif scheme == 'none':
419 self.log.warn("task::using no Task scheduler")
419 self.log.warn("task::using no Task scheduler")
420
420
421 else:
421 else:
422 self.log.info("task::using Python %s Task scheduler"%scheme)
422 self.log.info("task::using Python %s Task scheduler"%scheme)
423 sargs = (f.client_url('task'), f.engine_url('task'),
423 sargs = (f.client_url('task'), f.engine_url('task'),
424 monitor_url, disambiguate_url(f.client_url('notification')),
424 monitor_url, disambiguate_url(f.client_url('notification')),
425 disambiguate_url(f.client_url('registration')),
425 disambiguate_url(f.client_url('registration')),
426 )
426 )
427 kwargs = dict(logname='scheduler', loglevel=self.log_level,
427 kwargs = dict(logname='scheduler', loglevel=self.log_level,
428 log_url = self.log_url, config=dict(self.config))
428 log_url = self.log_url, config=dict(self.config))
429 if 'Process' in self.mq_class:
429 if 'Process' in self.mq_class:
430 # run the Python scheduler in a Process
430 # run the Python scheduler in a Process
431 q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
431 q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
432 q.daemon=True
432 q.daemon=True
433 children.append(q)
433 children.append(q)
434 else:
434 else:
435 # single-threaded Controller
435 # single-threaded Controller
436 kwargs['in_thread'] = True
436 kwargs['in_thread'] = True
437 launch_scheduler(*sargs, **kwargs)
437 launch_scheduler(*sargs, **kwargs)
438
438
439 # set unlimited HWM for all relay devices
439 # set unlimited HWM for all relay devices
440 if hasattr(zmq, 'SNDHWM'):
440 if hasattr(zmq, 'SNDHWM'):
441 q = children[0]
441 q = children[0]
442 q.setsockopt_in(zmq.RCVHWM, 0)
442 q.setsockopt_in(zmq.RCVHWM, 0)
443 q.setsockopt_out(zmq.SNDHWM, 0)
443 q.setsockopt_out(zmq.SNDHWM, 0)
444
444
445 for q in children[1:]:
445 for q in children[1:]:
446 if not hasattr(q, 'setsockopt_in'):
446 if not hasattr(q, 'setsockopt_in'):
447 continue
447 continue
448 q.setsockopt_in(zmq.SNDHWM, 0)
448 q.setsockopt_in(zmq.SNDHWM, 0)
449 q.setsockopt_in(zmq.RCVHWM, 0)
449 q.setsockopt_in(zmq.RCVHWM, 0)
450 q.setsockopt_out(zmq.SNDHWM, 0)
450 q.setsockopt_out(zmq.SNDHWM, 0)
451 q.setsockopt_out(zmq.RCVHWM, 0)
451 q.setsockopt_out(zmq.RCVHWM, 0)
452 q.setsockopt_mon(zmq.SNDHWM, 0)
452 q.setsockopt_mon(zmq.SNDHWM, 0)
453
453
454
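Every relay above is the same device shape: a two-socket queue with a PUB tap that mirrors traffic to the monitor. A standalone sketch with hypothetical addresses:

    import zmq
    from zmq.devices import ProcessMonitoredQueue

    q = ProcessMonitoredQueue(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
    q.bind_in('tcp://127.0.0.1:10101')      # hypothetical client-facing address
    q.bind_out('tcp://127.0.0.1:10102')     # hypothetical engine-facing address
    q.connect_mon('tcp://127.0.0.1:10103')  # hypothetical monitor address
    q.daemon = True
    q.start()                               # relay runs in a child process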
454
455 def terminate_children(self):
455 def terminate_children(self):
456 child_procs = []
456 child_procs = []
457 for child in self.children:
457 for child in self.children:
458 if isinstance(child, ProcessMonitoredQueue):
458 if isinstance(child, ProcessMonitoredQueue):
459 child_procs.append(child.launcher)
459 child_procs.append(child.launcher)
460 elif isinstance(child, Process):
460 elif isinstance(child, Process):
461 child_procs.append(child)
461 child_procs.append(child)
462 if child_procs:
462 if child_procs:
463 self.log.critical("terminating children...")
463 self.log.critical("terminating children...")
464 for child in child_procs:
464 for child in child_procs:
465 try:
465 try:
466 child.terminate()
466 child.terminate()
467 except OSError:
467 except OSError:
468 # already dead
468 # already dead
469 pass
469 pass
470
470
471 def handle_signal(self, sig, frame):
471 def handle_signal(self, sig, frame):
472 self.log.critical("Received signal %i, shutting down", sig)
472 self.log.critical("Received signal %i, shutting down", sig)
473 self.terminate_children()
473 self.terminate_children()
474 self.loop.stop()
474 self.loop.stop()
475
475
476 def init_signal(self):
476 def init_signal(self):
477 for sig in (SIGINT, SIGABRT, SIGTERM):
477 for sig in (SIGINT, SIGABRT, SIGTERM):
478 signal(sig, self.handle_signal)
478 signal(sig, self.handle_signal)
479
479
480 def do_import_statements(self):
480 def do_import_statements(self):
481 statements = self.import_statements
481 statements = self.import_statements
482 for s in statements:
482 for s in statements:
483 try:
483 try:
484 self.log.info("Executing statement: '%s'" % s)
484 self.log.info("Executing statement: '%s'" % s)
485 exec s in globals(), locals()
485 exec s in globals(), locals()
486 except:
486 except:
487 self.log.error("Error running statement: %s" % s)
487 self.log.error("Error running statement: %s" % s)
488
488
489 def forward_logging(self):
489 def forward_logging(self):
490 if self.log_url:
490 if self.log_url:
491 self.log.info("Forwarding logging to %s"%self.log_url)
491 self.log.info("Forwarding logging to %s"%self.log_url)
492 context = zmq.Context.instance()
492 context = zmq.Context.instance()
493 lsock = context.socket(zmq.PUB)
493 lsock = context.socket(zmq.PUB)
494 lsock.connect(self.log_url)
494 lsock.connect(self.log_url)
495 handler = PUBHandler(lsock)
495 handler = PUBHandler(lsock)
496 handler.root_topic = 'controller'
496 handler.root_topic = 'controller'
497 handler.setLevel(self.log_level)
497 handler.setLevel(self.log_level)
498 self.log.addHandler(handler)
498 self.log.addHandler(handler)
499
499
500 @catch_config_error
500 @catch_config_error
501 def initialize(self, argv=None):
501 def initialize(self, argv=None):
502 super(IPControllerApp, self).initialize(argv)
502 super(IPControllerApp, self).initialize(argv)
503 self.forward_logging()
503 self.forward_logging()
504 self.load_secondary_config()
504 self.load_secondary_config()
505 self.init_hub()
505 self.init_hub()
506 self.init_schedulers()
506 self.init_schedulers()
507
507
508 def start(self):
508 def start(self):
509 # Start the subprocesses:
509 # Start the subprocesses:
510 self.factory.start()
510 self.factory.start()
511 # children must be started before signals are setup,
511 # children must be started before signals are setup,
512 # otherwise signal-handling will fire multiple times
512 # otherwise signal-handling will fire multiple times
513 for child in self.children:
513 for child in self.children:
514 child.start()
514 child.start()
515 self.init_signal()
515 self.init_signal()
516
516
517 self.write_pid_file(overwrite=True)
517 self.write_pid_file(overwrite=True)
518
518
519 try:
519 try:
520 self.factory.loop.start()
520 self.factory.loop.start()
521 except KeyboardInterrupt:
521 except KeyboardInterrupt:
522 self.log.critical("Interrupted, Exiting...\n")
522 self.log.critical("Interrupted, Exiting...\n")
523 finally:
523 finally:
524 self.cleanup_connection_files()
524 self.cleanup_connection_files()
525
525
526
526
527 def launch_new_instance(*args, **kwargs):
527 def launch_new_instance(*args, **kwargs):
528 """Create and run the IPython controller"""
528 """Create and run the IPython controller"""
529 if sys.platform == 'win32':
529 if sys.platform == 'win32':
530 # make sure we don't get called from a multiprocessing subprocess
530 # make sure we don't get called from a multiprocessing subprocess
531 # this can result in infinite Controllers being started on Windows
531 # this can result in infinite Controllers being started on Windows
532 # which doesn't have a proper fork, so multiprocessing is wonky
532 # which doesn't have a proper fork, so multiprocessing is wonky
533
533
534 # this only comes up when IPython has been installed using vanilla
534 # this only comes up when IPython has been installed using vanilla
535 # setuptools, and *not* distribute.
535 # setuptools, and *not* distribute.
536 import multiprocessing
536 import multiprocessing
537 p = multiprocessing.current_process()
537 p = multiprocessing.current_process()
538 # the main process has name 'MainProcess'
538 # the main process has name 'MainProcess'
539 # subprocesses will have names like 'Process-1'
539 # subprocesses will have names like 'Process-1'
540 if p.name != 'MainProcess':
540 if p.name != 'MainProcess':
541 # we are a subprocess, don't start another Controller!
541 # we are a subprocess, don't start another Controller!
542 return
542 return
543 return IPControllerApp.launch_instance(*args, **kwargs)
543 return IPControllerApp.launch_instance(*args, **kwargs)
544
544
545
545
546 if __name__ == '__main__':
546 if __name__ == '__main__':
547 launch_new_instance()
547 launch_new_instance()
@@ -1,115 +1,117 b''
1 """
1 """
2 A simple logger object that consolidates messages incoming from ipcluster processes.
2 A simple logger object that consolidates messages incoming from ipcluster processes.
3
3
4 Authors:
4 Authors:
5
5
6 * MinRK
6 * MinRK
7
7
8 """
8 """
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Copyright (C) 2011 The IPython Development Team
11 # Copyright (C) 2011 The IPython Development Team
12 #
12 #
13 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
14 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16
16
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18 # Imports
18 # Imports
19 #-----------------------------------------------------------------------------
19 #-----------------------------------------------------------------------------
20
20
21
21
22 import logging
22 import logging
23 import sys
23 import sys
24
24
25 import zmq
25 import zmq
26 from zmq.eventloop import ioloop, zmqstream
26 from zmq.eventloop import ioloop, zmqstream
27
27
28 from IPython.config.configurable import LoggingConfigurable
28 from IPython.config.configurable import LoggingConfigurable
29 from IPython.utils.localinterfaces import LOCALHOST
29 from IPython.utils.localinterfaces import localhost
30 from IPython.utils.traitlets import Int, Unicode, Instance, List
30 from IPython.utils.traitlets import Int, Unicode, Instance, List
31
31
32 #-----------------------------------------------------------------------------
32 #-----------------------------------------------------------------------------
33 # Classes
33 # Classes
34 #-----------------------------------------------------------------------------
34 #-----------------------------------------------------------------------------
35
35
36
36
37 class LogWatcher(LoggingConfigurable):
37 class LogWatcher(LoggingConfigurable):
38 """A simple class that receives messages on a SUB socket, as published
38 """A simple class that receives messages on a SUB socket, as published
39 by subclasses of `zmq.log.handlers.PUBHandler`, and logs them itself.
39 by subclasses of `zmq.log.handlers.PUBHandler`, and logs them itself.
40
40
41 This can subscribe to multiple topics, but defaults to all topics.
41 This can subscribe to multiple topics, but defaults to all topics.
42 """
42 """
43
43
44 # configurables
44 # configurables
45 topics = List([''], config=True,
45 topics = List([''], config=True,
46 help="The ZMQ topics to subscribe to. Default is to subscribe to all messages")
46 help="The ZMQ topics to subscribe to. Default is to subscribe to all messages")
47 url = Unicode('tcp://%s:20202' % LOCALHOST, config=True,
47 url = Unicode(config=True,
48 help="ZMQ url on which to listen for log messages")
48 help="ZMQ url on which to listen for log messages")
49 def _url_default(self):
50 return 'tcp://%s:20202' % localhost()
49
51
50 # internals
52 # internals
51 stream = Instance('zmq.eventloop.zmqstream.ZMQStream')
53 stream = Instance('zmq.eventloop.zmqstream.ZMQStream')
52
54
53 context = Instance(zmq.Context)
55 context = Instance(zmq.Context)
54 def _context_default(self):
56 def _context_default(self):
55 return zmq.Context.instance()
57 return zmq.Context.instance()
56
58
57 loop = Instance(zmq.eventloop.ioloop.IOLoop)
59 loop = Instance(zmq.eventloop.ioloop.IOLoop)
58 def _loop_default(self):
60 def _loop_default(self):
59 return ioloop.IOLoop.instance()
61 return ioloop.IOLoop.instance()
60
62
61 def __init__(self, **kwargs):
63 def __init__(self, **kwargs):
62 super(LogWatcher, self).__init__(**kwargs)
64 super(LogWatcher, self).__init__(**kwargs)
63 s = self.context.socket(zmq.SUB)
65 s = self.context.socket(zmq.SUB)
64 s.bind(self.url)
66 s.bind(self.url)
65 self.stream = zmqstream.ZMQStream(s, self.loop)
67 self.stream = zmqstream.ZMQStream(s, self.loop)
66 self.subscribe()
68 self.subscribe()
67 self.on_trait_change(self.subscribe, 'topics')
69 self.on_trait_change(self.subscribe, 'topics')
68
70
69 def start(self):
71 def start(self):
70 self.stream.on_recv(self.log_message)
72 self.stream.on_recv(self.log_message)
71
73
72 def stop(self):
74 def stop(self):
73 self.stream.stop_on_recv()
75 self.stream.stop_on_recv()
74
76
75 def subscribe(self):
77 def subscribe(self):
76 """Update our SUB socket's subscriptions."""
78 """Update our SUB socket's subscriptions."""
77 self.stream.setsockopt(zmq.UNSUBSCRIBE, '')
79 self.stream.setsockopt(zmq.UNSUBSCRIBE, '')
78 if '' in self.topics:
80 if '' in self.topics:
79 self.log.debug("Subscribing to: everything")
81 self.log.debug("Subscribing to: everything")
80 self.stream.setsockopt(zmq.SUBSCRIBE, '')
82 self.stream.setsockopt(zmq.SUBSCRIBE, '')
81 else:
83 else:
82 for topic in self.topics:
84 for topic in self.topics:
83 self.log.debug("Subscribing to: %r"%(topic))
85 self.log.debug("Subscribing to: %r"%(topic))
84 self.stream.setsockopt(zmq.SUBSCRIBE, topic)
86 self.stream.setsockopt(zmq.SUBSCRIBE, topic)
85
87
86 def _extract_level(self, topic_str):
88 def _extract_level(self, topic_str):
87 """Turn 'engine.0.INFO.extra' into (logging.INFO, 'engine.0.extra')"""
89 """Turn 'engine.0.INFO.extra' into (logging.INFO, 'engine.0.extra')"""
88 topics = topic_str.split('.')
90 topics = topic_str.split('.')
89 for idx,t in enumerate(topics):
91 for idx,t in enumerate(topics):
90 level = getattr(logging, t, None)
92 level = getattr(logging, t, None)
91 if level is not None:
93 if level is not None:
92 break
94 break
93
95
94 if level is None:
96 if level is None:
95 level = logging.INFO
97 level = logging.INFO
96 else:
98 else:
97 topics.pop(idx)
99 topics.pop(idx)
98
100
99 return level, '.'.join(topics)
101 return level, '.'.join(topics)
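For example (a sketch, assuming a LogWatcher instance named watcher):

    import logging

    level, topic = watcher._extract_level('engine.0.INFO.extra')
    # level == logging.INFO, topic == 'engine.0.extra'
    level, topic = watcher._extract_level('engine.0.extra')
    # no recognized level name: INFO is assumed, topic is unchanged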
100
102
101
103
102 def log_message(self, raw):
104 def log_message(self, raw):
103 """receive and parse a message, then log it."""
105 """receive and parse a message, then log it."""
104 if len(raw) != 2 or '.' not in raw[0]:
106 if len(raw) != 2 or '.' not in raw[0]:
105 self.log.error("Invalid log message: %s"%raw)
107 self.log.error("Invalid log message: %s"%raw)
106 return
108 return
107 else:
109 else:
108 topic, msg = raw
110 topic, msg = raw
109 # don't newline, since log messages always newline:
111 # don't newline, since log messages always newline:
110 topic,level_name = topic.rsplit('.',1)
112 topic,level_name = topic.rsplit('.',1)
111 level,topic = self._extract_level(topic)
113 level,topic = self._extract_level(topic)
112 if msg[-1] == '\n':
114 if msg[-1] == '\n':
113 msg = msg[:-1]
115 msg = msg[:-1]
114 self.log.log(level, "[%s] %s" % (topic, msg))
116 self.log.log(level, "[%s] %s" % (topic, msg))
115
117
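Putting the class to use means constructing it, arming the receive callback, and running the loop. A minimal sketch with a hypothetical url:

    watcher = LogWatcher(url='tcp://127.0.0.1:20202')  # hypothetical url
    watcher.start()              # log each received message
    try:
        watcher.loop.start()     # block on the IOLoop
    except KeyboardInterrupt:
        watcher.stop()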
@@ -1,1858 +1,1858 b''
1 """A semi-synchronous Client for the ZMQ cluster
1 """A semi-synchronous Client for the ZMQ cluster
2
2
3 Authors:
3 Authors:
4
4
5 * MinRK
5 * MinRK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2010-2011 The IPython Development Team
8 # Copyright (C) 2010-2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 import os
18 import os
19 import json
19 import json
20 import sys
20 import sys
21 from threading import Thread, Event
21 from threading import Thread, Event
22 import time
22 import time
23 import warnings
23 import warnings
24 from datetime import datetime
24 from datetime import datetime
25 from getpass import getpass
25 from getpass import getpass
26 from pprint import pprint
26 from pprint import pprint
27
27
28 pjoin = os.path.join
28 pjoin = os.path.join
29
29
30 import zmq
30 import zmq
31 # from zmq.eventloop import ioloop, zmqstream
31 # from zmq.eventloop import ioloop, zmqstream
32
32
33 from IPython.config.configurable import MultipleInstanceError
33 from IPython.config.configurable import MultipleInstanceError
34 from IPython.core.application import BaseIPythonApplication
34 from IPython.core.application import BaseIPythonApplication
35 from IPython.core.profiledir import ProfileDir, ProfileDirError
35 from IPython.core.profiledir import ProfileDir, ProfileDirError
36
36
37 from IPython.utils.capture import RichOutput
37 from IPython.utils.capture import RichOutput
38 from IPython.utils.coloransi import TermColors
38 from IPython.utils.coloransi import TermColors
39 from IPython.utils.jsonutil import rekey
39 from IPython.utils.jsonutil import rekey
40 from IPython.utils.localinterfaces import LOCALHOST, LOCAL_IPS
40 from IPython.utils.localinterfaces import localhost, is_local_ip
41 from IPython.utils.path import get_ipython_dir
41 from IPython.utils.path import get_ipython_dir
42 from IPython.utils.py3compat import cast_bytes
42 from IPython.utils.py3compat import cast_bytes
43 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
43 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
44 Dict, List, Bool, Set, Any)
44 Dict, List, Bool, Set, Any)
45 from IPython.external.decorator import decorator
45 from IPython.external.decorator import decorator
46 from IPython.external.ssh import tunnel
46 from IPython.external.ssh import tunnel
47
47
48 from IPython.parallel import Reference
48 from IPython.parallel import Reference
49 from IPython.parallel import error
49 from IPython.parallel import error
50 from IPython.parallel import util
50 from IPython.parallel import util
51
51
52 from IPython.kernel.zmq.session import Session, Message
52 from IPython.kernel.zmq.session import Session, Message
53 from IPython.kernel.zmq import serialize
53 from IPython.kernel.zmq import serialize
54
54
55 from .asyncresult import AsyncResult, AsyncHubResult
55 from .asyncresult import AsyncResult, AsyncHubResult
56 from .view import DirectView, LoadBalancedView
56 from .view import DirectView, LoadBalancedView
57
57
58 if sys.version_info[0] >= 3:
58 if sys.version_info[0] >= 3:
59 # xrange is used in a couple 'isinstance' tests in py2
59 # xrange is used in a couple 'isinstance' tests in py2
60 # should be just 'range' in 3k
60 # should be just 'range' in 3k
61 xrange = range
61 xrange = range
62
62
63 #--------------------------------------------------------------------------
63 #--------------------------------------------------------------------------
64 # Decorators for Client methods
64 # Decorators for Client methods
65 #--------------------------------------------------------------------------
65 #--------------------------------------------------------------------------
66
66
67 @decorator
67 @decorator
68 def spin_first(f, self, *args, **kwargs):
68 def spin_first(f, self, *args, **kwargs):
69 """Call spin() to sync state prior to calling the method."""
69 """Call spin() to sync state prior to calling the method."""
70 self.spin()
70 self.spin()
71 return f(self, *args, **kwargs)
71 return f(self, *args, **kwargs)
72
72
73
73
74 #--------------------------------------------------------------------------
74 #--------------------------------------------------------------------------
75 # Classes
75 # Classes
76 #--------------------------------------------------------------------------
76 #--------------------------------------------------------------------------
77
77
78
78
79 class ExecuteReply(RichOutput):
79 class ExecuteReply(RichOutput):
80 """wrapper for finished Execute results"""
80 """wrapper for finished Execute results"""
81 def __init__(self, msg_id, content, metadata):
81 def __init__(self, msg_id, content, metadata):
82 self.msg_id = msg_id
82 self.msg_id = msg_id
83 self._content = content
83 self._content = content
84 self.execution_count = content['execution_count']
84 self.execution_count = content['execution_count']
85 self.metadata = metadata
85 self.metadata = metadata
86
86
87 # RichOutput overrides
87 # RichOutput overrides
88
88
89 @property
89 @property
90 def source(self):
90 def source(self):
91 pyout = self.metadata['pyout']
91 pyout = self.metadata['pyout']
92 if pyout:
92 if pyout:
93 return pyout.get('source', '')
93 return pyout.get('source', '')
94
94
95 @property
95 @property
96 def data(self):
96 def data(self):
97 pyout = self.metadata['pyout']
97 pyout = self.metadata['pyout']
98 if pyout:
98 if pyout:
99 return pyout.get('data', {})
99 return pyout.get('data', {})
100
100
101 @property
101 @property
102 def _metadata(self):
102 def _metadata(self):
103 pyout = self.metadata['pyout']
103 pyout = self.metadata['pyout']
104 if pyout:
104 if pyout:
105 return pyout.get('metadata', {})
105 return pyout.get('metadata', {})
106
106
107 def display(self):
107 def display(self):
108 from IPython.display import publish_display_data
108 from IPython.display import publish_display_data
109 publish_display_data(self.source, self.data, self.metadata)
109 publish_display_data(self.source, self.data, self.metadata)
110
110
111 def _repr_mime_(self, mime):
111 def _repr_mime_(self, mime):
112 if mime not in self.data:
112 if mime not in self.data:
113 return
113 return
114 data = self.data[mime]
114 data = self.data[mime]
115 if mime in self._metadata:
115 if mime in self._metadata:
116 return data, self._metadata[mime]
116 return data, self._metadata[mime]
117 else:
117 else:
118 return data
118 return data
119
119
120 def __getitem__(self, key):
120 def __getitem__(self, key):
121 return self.metadata[key]
121 return self.metadata[key]
122
122
123 def __getattr__(self, key):
123 def __getattr__(self, key):
124 if key not in self.metadata:
124 if key not in self.metadata:
125 raise AttributeError(key)
125 raise AttributeError(key)
126 return self.metadata[key]
126 return self.metadata[key]
127
127
128 def __repr__(self):
128 def __repr__(self):
129 pyout = self.metadata['pyout'] or {'data':{}}
129 pyout = self.metadata['pyout'] or {'data':{}}
130 text_out = pyout['data'].get('text/plain', '')
130 text_out = pyout['data'].get('text/plain', '')
131 if len(text_out) > 32:
131 if len(text_out) > 32:
132 text_out = text_out[:29] + '...'
132 text_out = text_out[:29] + '...'
133
133
134 return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)
134 return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)
135
135
136 def _repr_pretty_(self, p, cycle):
136 def _repr_pretty_(self, p, cycle):
137 pyout = self.metadata['pyout'] or {'data':{}}
137 pyout = self.metadata['pyout'] or {'data':{}}
138 text_out = pyout['data'].get('text/plain', '')
138 text_out = pyout['data'].get('text/plain', '')
139
139
140 if not text_out:
140 if not text_out:
141 return
141 return
142
142
143 try:
143 try:
144 ip = get_ipython()
144 ip = get_ipython()
145 except NameError:
145 except NameError:
146 colors = "NoColor"
146 colors = "NoColor"
147 else:
147 else:
148 colors = ip.colors
148 colors = ip.colors
149
149
150 if colors == "NoColor":
150 if colors == "NoColor":
151 out = normal = ""
151 out = normal = ""
152 else:
152 else:
153 out = TermColors.Red
153 out = TermColors.Red
154 normal = TermColors.Normal
154 normal = TermColors.Normal
155
155
156 if '\n' in text_out and not text_out.startswith('\n'):
156 if '\n' in text_out and not text_out.startswith('\n'):
157 # add newline for multiline reprs
157 # add newline for multiline reprs
158 text_out = '\n' + text_out
158 text_out = '\n' + text_out
159
159
160 p.text(
160 p.text(
161 out + u'Out[%i:%i]: ' % (
161 out + u'Out[%i:%i]: ' % (
162 self.metadata['engine_id'], self.execution_count
162 self.metadata['engine_id'], self.execution_count
163 ) + normal + text_out
163 ) + normal + text_out
164 )
164 )
165
165
166
166
167 class Metadata(dict):
167 class Metadata(dict):
168 """Subclass of dict for initializing metadata values.
168 """Subclass of dict for initializing metadata values.
169
169
170 Attribute access works on keys.
170 Attribute access works on keys.
171
171
172 These objects have a strict set of keys: an error is raised if you try
172 These objects have a strict set of keys: an error is raised if you try
173 to add new keys.
173 to add new keys.
174 """
174 """
175 def __init__(self, *args, **kwargs):
175 def __init__(self, *args, **kwargs):
176 dict.__init__(self)
176 dict.__init__(self)
177 md = {'msg_id' : None,
177 md = {'msg_id' : None,
178 'submitted' : None,
178 'submitted' : None,
179 'started' : None,
179 'started' : None,
180 'completed' : None,
180 'completed' : None,
181 'received' : None,
181 'received' : None,
182 'engine_uuid' : None,
182 'engine_uuid' : None,
183 'engine_id' : None,
183 'engine_id' : None,
184 'follow' : None,
184 'follow' : None,
185 'after' : None,
185 'after' : None,
186 'status' : None,
186 'status' : None,
187
187
188 'pyin' : None,
188 'pyin' : None,
189 'pyout' : None,
189 'pyout' : None,
190 'pyerr' : None,
190 'pyerr' : None,
191 'stdout' : '',
191 'stdout' : '',
192 'stderr' : '',
192 'stderr' : '',
193 'outputs' : [],
193 'outputs' : [],
194 'data': {},
194 'data': {},
195 'outputs_ready' : False,
195 'outputs_ready' : False,
196 }
196 }
197 self.update(md)
197 self.update(md)
198 self.update(dict(*args, **kwargs))
198 self.update(dict(*args, **kwargs))
199
199
200 def __getattr__(self, key):
200 def __getattr__(self, key):
201 """getattr aliased to getitem"""
201 """getattr aliased to getitem"""
202 if key in self.iterkeys():
202 if key in self.iterkeys():
203 return self[key]
203 return self[key]
204 else:
204 else:
205 raise AttributeError(key)
205 raise AttributeError(key)
206
206
207 def __setattr__(self, key, value):
207 def __setattr__(self, key, value):
208 """setattr aliased to setitem, with strict"""
208 """setattr aliased to setitem, with strict"""
209 if key in self.iterkeys():
209 if key in self.iterkeys():
210 self[key] = value
210 self[key] = value
211 else:
211 else:
212 raise AttributeError(key)
212 raise AttributeError(key)
213
213
214 def __setitem__(self, key, value):
214 def __setitem__(self, key, value):
215 """strict static key enforcement"""
215 """strict static key enforcement"""
216 if key in self.iterkeys():
216 if key in self.iterkeys():
217 dict.__setitem__(self, key, value)
217 dict.__setitem__(self, key, value)
218 else:
218 else:
219 raise KeyError(key)
219 raise KeyError(key)
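
The strict-key behavior described in the Metadata docstring means attribute access and item assignment both validate against the fixed key set; a brief illustrative sketch:

    md = Metadata()
    md.status = 'ok'        # attribute access aliases item access for known keys
    assert md['status'] == 'ok'
    try:
        md['bogus'] = 1     # unknown keys are rejected
    except KeyError:
        pass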
220
220
221
221
222 class Client(HasTraits):
222 class Client(HasTraits):
223 """A semi-synchronous client to the IPython ZMQ cluster
223 """A semi-synchronous client to the IPython ZMQ cluster
224
224
225 Parameters
225 Parameters
226 ----------
226 ----------
227
227
228 url_file : str/unicode; path to ipcontroller-client.json
228 url_file : str/unicode; path to ipcontroller-client.json
229 This JSON file should contain all the information needed to connect to a cluster,
229 This JSON file should contain all the information needed to connect to a cluster,
230 and is likely the only argument needed.
230 and is likely the only argument needed.
231 It supplies the connection information for the Hub's registration; with a JSON
231 It supplies the connection information for the Hub's registration; with a JSON
232 connector file, no further configuration is usually necessary.
232 connector file, no further configuration is usually necessary.
233 [Default: use profile]
233 [Default: use profile]
234 profile : bytes
234 profile : bytes
235 The name of the Cluster profile to be used to find connector information.
235 The name of the Cluster profile to be used to find connector information.
236 If run from an IPython application, the default profile will be the same
236 If run from an IPython application, the default profile will be the same
237 as the running application, otherwise it will be 'default'.
237 as the running application, otherwise it will be 'default'.
238 cluster_id : str
238 cluster_id : str
239 String id to be added to runtime files, to prevent name collisions when using
239 String id to be added to runtime files, to prevent name collisions when using
240 multiple clusters with a single profile simultaneously.
240 multiple clusters with a single profile simultaneously.
241 When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
241 When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
242 Since this is text inserted into filenames, typical recommendations apply:
242 Since this is text inserted into filenames, typical recommendations apply:
243 Simple character strings are ideal, and spaces are not recommended (but
243 Simple character strings are ideal, and spaces are not recommended (but
244 should generally work).
244 should generally work).
245 context : zmq.Context
245 context : zmq.Context
246 Pass an existing zmq.Context instance, otherwise the client will create its own.
246 Pass an existing zmq.Context instance, otherwise the client will create its own.
247 debug : bool
247 debug : bool
248 flag for lots of message printing for debug purposes
248 flag for lots of message printing for debug purposes
249 timeout : int/float
249 timeout : int/float
250 time (in seconds) to wait for connection replies from the Hub
250 time (in seconds) to wait for connection replies from the Hub
251 [Default: 10]
251 [Default: 10]
252
252
253 #-------------- session related args ----------------
253 #-------------- session related args ----------------
254
254
255 config : Config object
255 config : Config object
256 If specified, this will be relayed to the Session for configuration
256 If specified, this will be relayed to the Session for configuration
257 username : str
257 username : str
258 set username for the session object
258 set username for the session object
259
259
260 #-------------- ssh related args ----------------
260 #-------------- ssh related args ----------------
261 # These are args for configuring the ssh tunnel to be used
261 # These are args for configuring the ssh tunnel to be used
262 # credentials are used to forward connections over ssh to the Controller
262 # credentials are used to forward connections over ssh to the Controller
263 # Note that the ip given in `addr` needs to be relative to sshserver
263 # Note that the ip given in `addr` needs to be relative to sshserver
264 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
264 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
265 # and set sshserver as the same machine the Controller is on. However,
265 # and set sshserver as the same machine the Controller is on. However,
266 # the only requirement is that sshserver is able to see the Controller
266 # the only requirement is that sshserver is able to see the Controller
267 # (i.e. is within the same trusted network).
267 # (i.e. is within the same trusted network).
268
268
269 sshserver : str
269 sshserver : str
270 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
270 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
271 If keyfile or password is specified, and this is not, it will default to
271 If keyfile or password is specified, and this is not, it will default to
272 the ip given in addr.
272 the ip given in addr.
273 sshkey : str; path to ssh private key file
273 sshkey : str; path to ssh private key file
274 This specifies a key to be used in ssh login, default None.
274 This specifies a key to be used in ssh login, default None.
275 Regular default ssh keys will be used without specifying this argument.
275 Regular default ssh keys will be used without specifying this argument.
276 password : str
276 password : str
277 Your ssh password to sshserver. Note that if this is left None,
277 Your ssh password to sshserver. Note that if this is left None,
278 you will be prompted for it if passwordless key based login is unavailable.
278 you will be prompted for it if passwordless key based login is unavailable.
279 paramiko : bool
279 paramiko : bool
280 flag for whether to use paramiko instead of shell ssh for tunneling.
280 flag for whether to use paramiko instead of shell ssh for tunneling.
281 [default: True on win32, False else]
281 [default: True on win32, False else]
282
282
283
283
284 Attributes
284 Attributes
285 ----------
285 ----------
286
286
287 ids : list of int engine IDs
287 ids : list of int engine IDs
288 requesting the ids attribute always synchronizes
288 requesting the ids attribute always synchronizes
289 the registration state. To request ids without synchronization,
289 the registration state. To request ids without synchronization,
290 use the semi-private _ids attribute.
290 use the semi-private _ids attribute.
291
291
292 history : list of msg_ids
292 history : list of msg_ids
293 a list of msg_ids, keeping track of all the execution
293 a list of msg_ids, keeping track of all the execution
294 messages you have submitted in order.
294 messages you have submitted in order.
295
295
296 outstanding : set of msg_ids
296 outstanding : set of msg_ids
297 a set of msg_ids that have been submitted, but whose
297 a set of msg_ids that have been submitted, but whose
298 results have not yet been received.
298 results have not yet been received.
299
299
300 results : dict
300 results : dict
301 a dict of all our results, keyed by msg_id
301 a dict of all our results, keyed by msg_id
302
302
303 block : bool
303 block : bool
304 determines default behavior when block not specified
304 determines default behavior when block not specified
305 in execution methods
305 in execution methods
306
306
307 Methods
307 Methods
308 -------
308 -------
309
309
310 spin
310 spin
311 flushes incoming results and registration state changes
311 flushes incoming results and registration state changes
312 control methods spin, and requesting `ids` also ensures up-to-date state
312 control methods spin, and requesting `ids` also ensures up-to-date state
313
313
314 wait
314 wait
315 wait on one or more msg_ids
315 wait on one or more msg_ids
316
316
317 execution methods
317 execution methods
318 apply
318 apply
319 legacy: execute, run
319 legacy: execute, run
320
320
321 data movement
321 data movement
322 push, pull, scatter, gather
322 push, pull, scatter, gather
323
323
324 query methods
324 query methods
325 queue_status, get_result, purge, result_status
325 queue_status, get_result, purge, result_status
326
326
327 control methods
327 control methods
328 abort, shutdown
328 abort, shutdown
329
329
330 """
330 """
331
331
332
332
333 block = Bool(False)
333 block = Bool(False)
334 outstanding = Set()
334 outstanding = Set()
335 results = Instance('collections.defaultdict', (dict,))
335 results = Instance('collections.defaultdict', (dict,))
336 metadata = Instance('collections.defaultdict', (Metadata,))
336 metadata = Instance('collections.defaultdict', (Metadata,))
337 history = List()
337 history = List()
338 debug = Bool(False)
338 debug = Bool(False)
339 _spin_thread = Any()
339 _spin_thread = Any()
340 _stop_spinning = Any()
340 _stop_spinning = Any()
341
341
342 profile=Unicode()
342 profile=Unicode()
343 def _profile_default(self):
343 def _profile_default(self):
344 if BaseIPythonApplication.initialized():
344 if BaseIPythonApplication.initialized():
345 # an IPython app *might* be running, try to get its profile
345 # an IPython app *might* be running, try to get its profile
346 try:
346 try:
347 return BaseIPythonApplication.instance().profile
347 return BaseIPythonApplication.instance().profile
348 except (AttributeError, MultipleInstanceError):
348 except (AttributeError, MultipleInstanceError):
349 # could be a *different* subclass of config.Application,
349 # could be a *different* subclass of config.Application,
350 # which would raise one of these two errors.
350 # which would raise one of these two errors.
351 return u'default'
351 return u'default'
352 else:
352 else:
353 return u'default'
353 return u'default'
354
354
355
355
356 _outstanding_dict = Instance('collections.defaultdict', (set,))
356 _outstanding_dict = Instance('collections.defaultdict', (set,))
357 _ids = List()
357 _ids = List()
358 _connected=Bool(False)
358 _connected=Bool(False)
359 _ssh=Bool(False)
359 _ssh=Bool(False)
360 _context = Instance('zmq.Context')
360 _context = Instance('zmq.Context')
361 _config = Dict()
361 _config = Dict()
362 _engines=Instance(util.ReverseDict, (), {})
362 _engines=Instance(util.ReverseDict, (), {})
363 # _hub_socket=Instance('zmq.Socket')
363 # _hub_socket=Instance('zmq.Socket')
364 _query_socket=Instance('zmq.Socket')
364 _query_socket=Instance('zmq.Socket')
365 _control_socket=Instance('zmq.Socket')
365 _control_socket=Instance('zmq.Socket')
366 _iopub_socket=Instance('zmq.Socket')
366 _iopub_socket=Instance('zmq.Socket')
367 _notification_socket=Instance('zmq.Socket')
367 _notification_socket=Instance('zmq.Socket')
368 _mux_socket=Instance('zmq.Socket')
368 _mux_socket=Instance('zmq.Socket')
369 _task_socket=Instance('zmq.Socket')
369 _task_socket=Instance('zmq.Socket')
370 _task_scheme=Unicode()
370 _task_scheme=Unicode()
371 _closed = False
371 _closed = False
372 _ignored_control_replies=Integer(0)
372 _ignored_control_replies=Integer(0)
373 _ignored_hub_replies=Integer(0)
373 _ignored_hub_replies=Integer(0)
374
374
375 def __new__(self, *args, **kw):
375 def __new__(self, *args, **kw):
376 # don't raise on positional args
376 # don't raise on positional args
377 return HasTraits.__new__(self, **kw)
377 return HasTraits.__new__(self, **kw)
378
378
379 def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
379 def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
380 context=None, debug=False,
380 context=None, debug=False,
381 sshserver=None, sshkey=None, password=None, paramiko=None,
381 sshserver=None, sshkey=None, password=None, paramiko=None,
382 timeout=10, cluster_id=None, **extra_args
382 timeout=10, cluster_id=None, **extra_args
383 ):
383 ):
384 if profile:
384 if profile:
385 super(Client, self).__init__(debug=debug, profile=profile)
385 super(Client, self).__init__(debug=debug, profile=profile)
386 else:
386 else:
387 super(Client, self).__init__(debug=debug)
387 super(Client, self).__init__(debug=debug)
388 if context is None:
388 if context is None:
389 context = zmq.Context.instance()
389 context = zmq.Context.instance()
390 self._context = context
390 self._context = context
391 self._stop_spinning = Event()
391 self._stop_spinning = Event()
392
392
393 if 'url_or_file' in extra_args:
393 if 'url_or_file' in extra_args:
394 url_file = extra_args['url_or_file']
394 url_file = extra_args['url_or_file']
395 warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)
395 warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)
396
396
397 if url_file and util.is_url(url_file):
397 if url_file and util.is_url(url_file):
398 raise ValueError("single urls cannot be specified, url-files must be used.")
398 raise ValueError("single urls cannot be specified, url-files must be used.")
399
399
400 self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
400 self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
401
401
402 if self._cd is not None:
402 if self._cd is not None:
403 if url_file is None:
403 if url_file is None:
404 if not cluster_id:
404 if not cluster_id:
405 client_json = 'ipcontroller-client.json'
405 client_json = 'ipcontroller-client.json'
406 else:
406 else:
407 client_json = 'ipcontroller-%s-client.json' % cluster_id
407 client_json = 'ipcontroller-%s-client.json' % cluster_id
408 url_file = pjoin(self._cd.security_dir, client_json)
408 url_file = pjoin(self._cd.security_dir, client_json)
409 if url_file is None:
409 if url_file is None:
410 raise ValueError(
410 raise ValueError(
411 "I can't find enough information to connect to a hub!"
411 "I can't find enough information to connect to a hub!"
412 " Please specify at least one of url_file or profile."
412 " Please specify at least one of url_file or profile."
413 )
413 )
414
414
415 with open(url_file) as f:
415 with open(url_file) as f:
416 cfg = json.load(f)
416 cfg = json.load(f)
417
417
418 self._task_scheme = cfg['task_scheme']
418 self._task_scheme = cfg['task_scheme']
419
419
420 # sync defaults from args, json:
420 # sync defaults from args, json:
421 if sshserver:
421 if sshserver:
422 cfg['ssh'] = sshserver
422 cfg['ssh'] = sshserver
423
423
424 location = cfg.setdefault('location', None)
424 location = cfg.setdefault('location', None)
425
425
426 proto,addr = cfg['interface'].split('://')
426 proto,addr = cfg['interface'].split('://')
427 addr = util.disambiguate_ip_address(addr, location)
427 addr = util.disambiguate_ip_address(addr, location)
428 cfg['interface'] = "%s://%s" % (proto, addr)
428 cfg['interface'] = "%s://%s" % (proto, addr)
429
429
430 # turn interface,port into full urls:
430 # turn interface,port into full urls:
431 for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
431 for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
432 cfg[key] = cfg['interface'] + ':%i' % cfg[key]
432 cfg[key] = cfg['interface'] + ':%i' % cfg[key]
433
433
434 url = cfg['registration']
434 url = cfg['registration']
435
435
436 if location is not None and addr == LOCALHOST:
436 if location is not None and addr == localhost():
437 # location specified, and connection is expected to be local
437 # location specified, and connection is expected to be local
438 if location not in LOCAL_IPS and not sshserver:
438 if not is_local_ip(location) and not sshserver:
439 # load ssh from JSON *only* if the controller is not on
439 # load ssh from JSON *only* if the controller is not on
440 # this machine
440 # this machine
441 sshserver=cfg['ssh']
441 sshserver=cfg['ssh']
442 if location not in LOCAL_IPS and not sshserver:
442 if not is_local_ip(location) and not sshserver:
443 # warn if no ssh specified, but SSH is probably needed
443 # warn if no ssh specified, but SSH is probably needed
444 # This is only a warning, because the most likely cause
444 # This is only a warning, because the most likely cause
445 # is a local Controller on a laptop whose IP is dynamic
445 # is a local Controller on a laptop whose IP is dynamic
446 warnings.warn("""
446 warnings.warn("""
447 Controller appears to be listening on localhost, but not on this machine.
447 Controller appears to be listening on localhost, but not on this machine.
448 If this is true, you should specify Client(...,sshserver='you@%s')
448 If this is true, you should specify Client(...,sshserver='you@%s')
449 or instruct your controller to listen on an external IP."""%location,
449 or instruct your controller to listen on an external IP."""%location,
450 RuntimeWarning)
450 RuntimeWarning)
451 elif not sshserver:
451 elif not sshserver:
452 # otherwise sync with cfg
452 # otherwise sync with cfg
453 sshserver = cfg['ssh']
453 sshserver = cfg['ssh']
454
454
455 self._config = cfg
455 self._config = cfg
456
456
457 self._ssh = bool(sshserver or sshkey or password)
457 self._ssh = bool(sshserver or sshkey or password)
458 if self._ssh and sshserver is None:
458 if self._ssh and sshserver is None:
459 # default to ssh via localhost
459 # default to ssh via localhost
460 sshserver = addr
460 sshserver = addr
461 if self._ssh and password is None:
461 if self._ssh and password is None:
462 if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
462 if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
463 password=False
463 password=False
464 else:
464 else:
465 password = getpass("SSH Password for %s: "%sshserver)
465 password = getpass("SSH Password for %s: "%sshserver)
466 ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
466 ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
467
467
468 # configure and construct the session
468 # configure and construct the session
469 try:
469 try:
470 extra_args['packer'] = cfg['pack']
470 extra_args['packer'] = cfg['pack']
471 extra_args['unpacker'] = cfg['unpack']
471 extra_args['unpacker'] = cfg['unpack']
472 extra_args['key'] = cast_bytes(cfg['key'])
472 extra_args['key'] = cast_bytes(cfg['key'])
473 extra_args['signature_scheme'] = cfg['signature_scheme']
473 extra_args['signature_scheme'] = cfg['signature_scheme']
474 except KeyError as exc:
474 except KeyError as exc:
475 msg = '\n'.join([
475 msg = '\n'.join([
476 "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
476 "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
477 "If you are reusing connection files, remove them and start ipcontroller again."
477 "If you are reusing connection files, remove them and start ipcontroller again."
478 ])
478 ])
479 raise ValueError(msg.format(exc.message))
479 raise ValueError(msg.format(exc.message))
480
480
481 self.session = Session(**extra_args)
481 self.session = Session(**extra_args)
482
482
483 self._query_socket = self._context.socket(zmq.DEALER)
483 self._query_socket = self._context.socket(zmq.DEALER)
484
484
485 if self._ssh:
485 if self._ssh:
486 tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
486 tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
487 else:
487 else:
488 self._query_socket.connect(cfg['registration'])
488 self._query_socket.connect(cfg['registration'])
489
489
490 self.session.debug = self.debug
490 self.session.debug = self.debug
491
491
492 self._notification_handlers = {'registration_notification' : self._register_engine,
492 self._notification_handlers = {'registration_notification' : self._register_engine,
493 'unregistration_notification' : self._unregister_engine,
493 'unregistration_notification' : self._unregister_engine,
494 'shutdown_notification' : lambda msg: self.close(),
494 'shutdown_notification' : lambda msg: self.close(),
495 }
495 }
496 self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
496 self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
497 'apply_reply' : self._handle_apply_reply}
497 'apply_reply' : self._handle_apply_reply}
498
498
499 try:
499 try:
500 self._connect(sshserver, ssh_kwargs, timeout)
500 self._connect(sshserver, ssh_kwargs, timeout)
501 except:
501 except:
502 self.close(linger=0)
502 self.close(linger=0)
503 raise
503 raise
504
504
505 # last step: setup magics, if we are in IPython:
505 # last step: setup magics, if we are in IPython:
506
506
507 try:
507 try:
508 ip = get_ipython()
508 ip = get_ipython()
509 except NameError:
509 except NameError:
510 return
510 return
511 else:
511 else:
512 if 'px' not in ip.magics_manager.magics:
512 if 'px' not in ip.magics_manager.magics:
513 # in IPython but we are the first Client.
513 # in IPython but we are the first Client.
514 # activate a default view for parallel magics.
514 # activate a default view for parallel magics.
515 self.activate()
515 self.activate()
516
516
517 def __del__(self):
517 def __del__(self):
518 """cleanup sockets, but _not_ context."""
518 """cleanup sockets, but _not_ context."""
519 self.close()
519 self.close()
520
520
521 def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
521 def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
522 if ipython_dir is None:
522 if ipython_dir is None:
523 ipython_dir = get_ipython_dir()
523 ipython_dir = get_ipython_dir()
524 if profile_dir is not None:
524 if profile_dir is not None:
525 try:
525 try:
526 self._cd = ProfileDir.find_profile_dir(profile_dir)
526 self._cd = ProfileDir.find_profile_dir(profile_dir)
527 return
527 return
528 except ProfileDirError:
528 except ProfileDirError:
529 pass
529 pass
530 elif profile is not None:
530 elif profile is not None:
531 try:
531 try:
532 self._cd = ProfileDir.find_profile_dir_by_name(
532 self._cd = ProfileDir.find_profile_dir_by_name(
533 ipython_dir, profile)
533 ipython_dir, profile)
534 return
534 return
535 except ProfileDirError:
535 except ProfileDirError:
536 pass
536 pass
537 self._cd = None
537 self._cd = None
538
538
539 def _update_engines(self, engines):
539 def _update_engines(self, engines):
540 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
540 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
541 for k,v in engines.iteritems():
541 for k,v in engines.iteritems():
542 eid = int(k)
542 eid = int(k)
543 if eid not in self._engines:
543 if eid not in self._engines:
544 self._ids.append(eid)
544 self._ids.append(eid)
545 self._engines[eid] = v
545 self._engines[eid] = v
546 self._ids = sorted(self._ids)
546 self._ids = sorted(self._ids)
547 if sorted(self._engines.keys()) != range(len(self._engines)) and \
547 if sorted(self._engines.keys()) != range(len(self._engines)) and \
548 self._task_scheme == 'pure' and self._task_socket:
548 self._task_scheme == 'pure' and self._task_socket:
549 self._stop_scheduling_tasks()
549 self._stop_scheduling_tasks()
550
550
551 def _stop_scheduling_tasks(self):
551 def _stop_scheduling_tasks(self):
552 """Stop scheduling tasks because an engine has been unregistered
552 """Stop scheduling tasks because an engine has been unregistered
553 from a pure ZMQ scheduler.
553 from a pure ZMQ scheduler.
554 """
554 """
555 self._task_socket.close()
555 self._task_socket.close()
556 self._task_socket = None
556 self._task_socket = None
557 msg = "An engine has been unregistered, and we are using pure " +\
557 msg = "An engine has been unregistered, and we are using pure " +\
558 "ZMQ task scheduling. Task farming will be disabled."
558 "ZMQ task scheduling. Task farming will be disabled."
559 if self.outstanding:
559 if self.outstanding:
560 msg += " If you were running tasks when this happened, " +\
560 msg += " If you were running tasks when this happened, " +\
561 "some `outstanding` msg_ids may never resolve."
561 "some `outstanding` msg_ids may never resolve."
562 warnings.warn(msg, RuntimeWarning)
562 warnings.warn(msg, RuntimeWarning)
563
563
564 def _build_targets(self, targets):
564 def _build_targets(self, targets):
565 """Turn valid target IDs or 'all' into two lists:
565 """Turn valid target IDs or 'all' into two lists:
566 (int_ids, uuids).
566 (int_ids, uuids).
567 """
567 """
568 if not self._ids:
568 if not self._ids:
569 # flush notification socket if no engines yet, just in case
569 # flush notification socket if no engines yet, just in case
570 if not self.ids:
570 if not self.ids:
571 raise error.NoEnginesRegistered("Can't build targets without any engines")
571 raise error.NoEnginesRegistered("Can't build targets without any engines")
572
572
573 if targets is None:
573 if targets is None:
574 targets = self._ids
574 targets = self._ids
575 elif isinstance(targets, basestring):
575 elif isinstance(targets, basestring):
576 if targets.lower() == 'all':
576 if targets.lower() == 'all':
577 targets = self._ids
577 targets = self._ids
578 else:
578 else:
579 raise TypeError("%r not valid str target, must be 'all'"%(targets))
579 raise TypeError("%r not valid str target, must be 'all'"%(targets))
580 elif isinstance(targets, int):
580 elif isinstance(targets, int):
581 if targets < 0:
581 if targets < 0:
582 targets = self.ids[targets]
582 targets = self.ids[targets]
583 if targets not in self._ids:
583 if targets not in self._ids:
584 raise IndexError("No such engine: %i"%targets)
584 raise IndexError("No such engine: %i"%targets)
585 targets = [targets]
585 targets = [targets]
586
586
587 if isinstance(targets, slice):
587 if isinstance(targets, slice):
588 indices = range(len(self._ids))[targets]
588 indices = range(len(self._ids))[targets]
589 ids = self.ids
589 ids = self.ids
590 targets = [ ids[i] for i in indices ]
590 targets = [ ids[i] for i in indices ]
591
591
592 if not isinstance(targets, (tuple, list, xrange)):
592 if not isinstance(targets, (tuple, list, xrange)):
593 raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))
593 raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))
594
594
595 return [cast_bytes(self._engines[t]) for t in targets], list(targets)
595 return [cast_bytes(self._engines[t]) for t in targets], list(targets)
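
Roughly, with four registered engines (ids [0, 1, 2, 3]), the normalization above behaves as follows (hedged illustration):

    # _build_targets('all')        -> (engine uuids, [0, 1, 2, 3])
    # _build_targets(-1)           -> (engine uuids, [3])
    # _build_targets(slice(0, 2))  -> (engine uuids, [0, 1])
    # _build_targets('bogus')      -> TypeError (only 'all' is a valid string)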
596
596
597 def _connect(self, sshserver, ssh_kwargs, timeout):
597 def _connect(self, sshserver, ssh_kwargs, timeout):
598 """setup all our socket connections to the cluster. This is called from
598 """setup all our socket connections to the cluster. This is called from
599 __init__."""
599 __init__."""
600
600
601 # Maybe allow reconnecting?
601 # Maybe allow reconnecting?
602 if self._connected:
602 if self._connected:
603 return
603 return
604 self._connected=True
604 self._connected=True
605
605
606 def connect_socket(s, url):
606 def connect_socket(s, url):
607 if self._ssh:
607 if self._ssh:
608 return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
608 return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
609 else:
609 else:
610 return s.connect(url)
610 return s.connect(url)
611
611
612 self.session.send(self._query_socket, 'connection_request')
612 self.session.send(self._query_socket, 'connection_request')
613 # use Poller because zmq.select has wrong units in pyzmq 2.1.7
613 # use Poller because zmq.select has wrong units in pyzmq 2.1.7
614 poller = zmq.Poller()
614 poller = zmq.Poller()
615 poller.register(self._query_socket, zmq.POLLIN)
615 poller.register(self._query_socket, zmq.POLLIN)
616 # poll expects milliseconds, timeout is seconds
616 # poll expects milliseconds, timeout is seconds
617 evts = poller.poll(timeout*1000)
617 evts = poller.poll(timeout*1000)
618 if not evts:
618 if not evts:
619 raise error.TimeoutError("Hub connection request timed out")
619 raise error.TimeoutError("Hub connection request timed out")
620 idents,msg = self.session.recv(self._query_socket,mode=0)
620 idents,msg = self.session.recv(self._query_socket,mode=0)
621 if self.debug:
621 if self.debug:
622 pprint(msg)
622 pprint(msg)
623 content = msg['content']
623 content = msg['content']
624 # self._config['registration'] = dict(content)
624 # self._config['registration'] = dict(content)
625 cfg = self._config
625 cfg = self._config
626 if content['status'] == 'ok':
626 if content['status'] == 'ok':
627 self._mux_socket = self._context.socket(zmq.DEALER)
627 self._mux_socket = self._context.socket(zmq.DEALER)
628 connect_socket(self._mux_socket, cfg['mux'])
628 connect_socket(self._mux_socket, cfg['mux'])
629
629
630 self._task_socket = self._context.socket(zmq.DEALER)
630 self._task_socket = self._context.socket(zmq.DEALER)
631 connect_socket(self._task_socket, cfg['task'])
631 connect_socket(self._task_socket, cfg['task'])
632
632
633 self._notification_socket = self._context.socket(zmq.SUB)
633 self._notification_socket = self._context.socket(zmq.SUB)
634 self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
634 self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
635 connect_socket(self._notification_socket, cfg['notification'])
635 connect_socket(self._notification_socket, cfg['notification'])
636
636
637 self._control_socket = self._context.socket(zmq.DEALER)
637 self._control_socket = self._context.socket(zmq.DEALER)
638 connect_socket(self._control_socket, cfg['control'])
638 connect_socket(self._control_socket, cfg['control'])
639
639
640 self._iopub_socket = self._context.socket(zmq.SUB)
640 self._iopub_socket = self._context.socket(zmq.SUB)
641 self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
641 self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
642 connect_socket(self._iopub_socket, cfg['iopub'])
642 connect_socket(self._iopub_socket, cfg['iopub'])
643
643
644 self._update_engines(dict(content['engines']))
644 self._update_engines(dict(content['engines']))
645 else:
645 else:
646 self._connected = False
646 self._connected = False
647 raise Exception("Failed to connect!")
647 raise Exception("Failed to connect!")
648
648
649 #--------------------------------------------------------------------------
649 #--------------------------------------------------------------------------
650 # handlers and callbacks for incoming messages
650 # handlers and callbacks for incoming messages
651 #--------------------------------------------------------------------------
651 #--------------------------------------------------------------------------
652
652
653 def _unwrap_exception(self, content):
653 def _unwrap_exception(self, content):
654 """unwrap exception, and remap engine_id to int."""
654 """unwrap exception, and remap engine_id to int."""
655 e = error.unwrap_exception(content)
655 e = error.unwrap_exception(content)
656 # print e.traceback
656 # print e.traceback
657 if e.engine_info:
657 if e.engine_info:
658 e_uuid = e.engine_info['engine_uuid']
658 e_uuid = e.engine_info['engine_uuid']
659 eid = self._engines[e_uuid]
659 eid = self._engines[e_uuid]
660 e.engine_info['engine_id'] = eid
660 e.engine_info['engine_id'] = eid
661 return e
661 return e
662
662
663 def _extract_metadata(self, msg):
663 def _extract_metadata(self, msg):
664 header = msg['header']
664 header = msg['header']
665 parent = msg['parent_header']
665 parent = msg['parent_header']
666 msg_meta = msg['metadata']
666 msg_meta = msg['metadata']
667 content = msg['content']
667 content = msg['content']
668 md = {'msg_id' : parent['msg_id'],
668 md = {'msg_id' : parent['msg_id'],
669 'received' : datetime.now(),
669 'received' : datetime.now(),
670 'engine_uuid' : msg_meta.get('engine', None),
670 'engine_uuid' : msg_meta.get('engine', None),
671 'follow' : msg_meta.get('follow', []),
671 'follow' : msg_meta.get('follow', []),
672 'after' : msg_meta.get('after', []),
672 'after' : msg_meta.get('after', []),
673 'status' : content['status'],
673 'status' : content['status'],
674 }
674 }
675
675
676 if md['engine_uuid'] is not None:
676 if md['engine_uuid'] is not None:
677 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
677 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
678
678
679 if 'date' in parent:
679 if 'date' in parent:
680 md['submitted'] = parent['date']
680 md['submitted'] = parent['date']
681 if 'started' in msg_meta:
681 if 'started' in msg_meta:
682 md['started'] = msg_meta['started']
682 md['started'] = msg_meta['started']
683 if 'date' in header:
683 if 'date' in header:
684 md['completed'] = header['date']
684 md['completed'] = header['date']
685 return md
685 return md
686
686
687 def _register_engine(self, msg):
687 def _register_engine(self, msg):
688 """Register a new engine, and update our connection info."""
688 """Register a new engine, and update our connection info."""
689 content = msg['content']
689 content = msg['content']
690 eid = content['id']
690 eid = content['id']
691 d = {eid : content['uuid']}
691 d = {eid : content['uuid']}
692 self._update_engines(d)
692 self._update_engines(d)
693
693
694 def _unregister_engine(self, msg):
694 def _unregister_engine(self, msg):
695 """Unregister an engine that has died."""
695 """Unregister an engine that has died."""
696 content = msg['content']
696 content = msg['content']
697 eid = int(content['id'])
697 eid = int(content['id'])
698 if eid in self._ids:
698 if eid in self._ids:
699 self._ids.remove(eid)
699 self._ids.remove(eid)
700 uuid = self._engines.pop(eid)
700 uuid = self._engines.pop(eid)
701
701
702 self._handle_stranded_msgs(eid, uuid)
702 self._handle_stranded_msgs(eid, uuid)
703
703
704 if self._task_socket and self._task_scheme == 'pure':
704 if self._task_socket and self._task_scheme == 'pure':
705 self._stop_scheduling_tasks()
705 self._stop_scheduling_tasks()
706
706
707 def _handle_stranded_msgs(self, eid, uuid):
707 def _handle_stranded_msgs(self, eid, uuid):
708 """Handle messages known to be on an engine when the engine unregisters.
708 """Handle messages known to be on an engine when the engine unregisters.
709
709
710 It is possible that this will fire prematurely - that is, an engine will
710 It is possible that this will fire prematurely - that is, an engine will
711 go down after completing a result, and the client will be notified
711 go down after completing a result, and the client will be notified
712 of the unregistration and later receive the successful result.
712 of the unregistration and later receive the successful result.
713 """
713 """
714
714
715 outstanding = self._outstanding_dict[uuid]
715 outstanding = self._outstanding_dict[uuid]
716
716
717 for msg_id in list(outstanding):
717 for msg_id in list(outstanding):
718 if msg_id in self.results:
718 if msg_id in self.results:
719 # we already have the result
719 # we already have the result
720 continue
720 continue
721 try:
721 try:
722 raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
722 raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
723 except:
723 except:
724 content = error.wrap_exception()
724 content = error.wrap_exception()
725 # build a fake message:
725 # build a fake message:
726 msg = self.session.msg('apply_reply', content=content)
726 msg = self.session.msg('apply_reply', content=content)
727 msg['parent_header']['msg_id'] = msg_id
727 msg['parent_header']['msg_id'] = msg_id
728 msg['metadata']['engine'] = uuid
728 msg['metadata']['engine'] = uuid
729 self._handle_apply_reply(msg)
729 self._handle_apply_reply(msg)
730
730
731 def _handle_execute_reply(self, msg):
731 def _handle_execute_reply(self, msg):
732 """Save the reply to an execute_request into our results.
732 """Save the reply to an execute_request into our results.
733
733
734 execute messages are never actually used. apply is used instead.
734 execute messages are never actually used. apply is used instead.
735 """
735 """
736
736
737 parent = msg['parent_header']
737 parent = msg['parent_header']
738 msg_id = parent['msg_id']
738 msg_id = parent['msg_id']
739 if msg_id not in self.outstanding:
739 if msg_id not in self.outstanding:
740 if msg_id in self.history:
740 if msg_id in self.history:
741 print ("got stale result: %s"%msg_id)
741 print ("got stale result: %s"%msg_id)
742 else:
742 else:
743 print ("got unknown result: %s"%msg_id)
743 print ("got unknown result: %s"%msg_id)
744 else:
744 else:
745 self.outstanding.remove(msg_id)
745 self.outstanding.remove(msg_id)
746
746
747 content = msg['content']
747 content = msg['content']
748 header = msg['header']
748 header = msg['header']
749
749
750 # construct metadata:
750 # construct metadata:
751 md = self.metadata[msg_id]
751 md = self.metadata[msg_id]
752 md.update(self._extract_metadata(msg))
752 md.update(self._extract_metadata(msg))
753 # is this redundant?
753 # is this redundant?
754 self.metadata[msg_id] = md
754 self.metadata[msg_id] = md
755
755
756 e_outstanding = self._outstanding_dict[md['engine_uuid']]
756 e_outstanding = self._outstanding_dict[md['engine_uuid']]
757 if msg_id in e_outstanding:
757 if msg_id in e_outstanding:
758 e_outstanding.remove(msg_id)
758 e_outstanding.remove(msg_id)
759
759
760 # construct result:
760 # construct result:
761 if content['status'] == 'ok':
761 if content['status'] == 'ok':
762 self.results[msg_id] = ExecuteReply(msg_id, content, md)
762 self.results[msg_id] = ExecuteReply(msg_id, content, md)
763 elif content['status'] == 'aborted':
763 elif content['status'] == 'aborted':
764 self.results[msg_id] = error.TaskAborted(msg_id)
764 self.results[msg_id] = error.TaskAborted(msg_id)
765 elif content['status'] == 'resubmitted':
765 elif content['status'] == 'resubmitted':
766 # TODO: handle resubmission
766 # TODO: handle resubmission
767 pass
767 pass
768 else:
768 else:
769 self.results[msg_id] = self._unwrap_exception(content)
769 self.results[msg_id] = self._unwrap_exception(content)
770
770
771 def _handle_apply_reply(self, msg):
771 def _handle_apply_reply(self, msg):
772 """Save the reply to an apply_request into our results."""
772 """Save the reply to an apply_request into our results."""
773 parent = msg['parent_header']
773 parent = msg['parent_header']
774 msg_id = parent['msg_id']
774 msg_id = parent['msg_id']
775 if msg_id not in self.outstanding:
775 if msg_id not in self.outstanding:
776 if msg_id in self.history:
776 if msg_id in self.history:
777 print ("got stale result: %s"%msg_id)
777 print ("got stale result: %s"%msg_id)
778 print(self.results[msg_id])
778 print(self.results[msg_id])
779 print(msg)
779 print(msg)
780 else:
780 else:
781 print ("got unknown result: %s"%msg_id)
781 print ("got unknown result: %s"%msg_id)
782 else:
782 else:
783 self.outstanding.remove(msg_id)
783 self.outstanding.remove(msg_id)
784 content = msg['content']
784 content = msg['content']
785 header = msg['header']
785 header = msg['header']
786
786
787 # construct metadata:
787 # construct metadata:
788 md = self.metadata[msg_id]
788 md = self.metadata[msg_id]
789 md.update(self._extract_metadata(msg))
789 md.update(self._extract_metadata(msg))
790 # is this redundant?
790 # is this redundant?
791 self.metadata[msg_id] = md
791 self.metadata[msg_id] = md
792
792
793 e_outstanding = self._outstanding_dict[md['engine_uuid']]
793 e_outstanding = self._outstanding_dict[md['engine_uuid']]
794 if msg_id in e_outstanding:
794 if msg_id in e_outstanding:
795 e_outstanding.remove(msg_id)
795 e_outstanding.remove(msg_id)
796
796
797 # construct result:
797 # construct result:
798 if content['status'] == 'ok':
798 if content['status'] == 'ok':
799 self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
799 self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
800 elif content['status'] == 'aborted':
800 elif content['status'] == 'aborted':
801 self.results[msg_id] = error.TaskAborted(msg_id)
801 self.results[msg_id] = error.TaskAborted(msg_id)
802 elif content['status'] == 'resubmitted':
802 elif content['status'] == 'resubmitted':
803 # TODO: handle resubmission
803 # TODO: handle resubmission
804 pass
804 pass
805 else:
805 else:
806 self.results[msg_id] = self._unwrap_exception(content)
806 self.results[msg_id] = self._unwrap_exception(content)
807
807
808 def _flush_notifications(self):
808 def _flush_notifications(self):
809 """Flush notifications of engine registrations waiting
809 """Flush notifications of engine registrations waiting
810 in ZMQ queue."""
810 in ZMQ queue."""
811 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
811 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
812 while msg is not None:
812 while msg is not None:
813 if self.debug:
813 if self.debug:
814 pprint(msg)
814 pprint(msg)
815 msg_type = msg['header']['msg_type']
815 msg_type = msg['header']['msg_type']
816 handler = self._notification_handlers.get(msg_type, None)
816 handler = self._notification_handlers.get(msg_type, None)
817 if handler is None:
817 if handler is None:
818 raise Exception("Unhandled message type: %s" % msg_type)
818 raise Exception("Unhandled message type: %s" % msg_type)
819 else:
819 else:
820 handler(msg)
820 handler(msg)
821 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
821 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
822
822
823 def _flush_results(self, sock):
823 def _flush_results(self, sock):
824 """Flush task or queue results waiting in ZMQ queue."""
824 """Flush task or queue results waiting in ZMQ queue."""
825 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
825 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
826 while msg is not None:
826 while msg is not None:
827 if self.debug:
827 if self.debug:
828 pprint(msg)
828 pprint(msg)
829 msg_type = msg['header']['msg_type']
829 msg_type = msg['header']['msg_type']
830 handler = self._queue_handlers.get(msg_type, None)
830 handler = self._queue_handlers.get(msg_type, None)
831 if handler is None:
831 if handler is None:
832 raise Exception("Unhandled message type: %s" % msg_type)
832 raise Exception("Unhandled message type: %s" % msg_type)
833 else:
833 else:
834 handler(msg)
834 handler(msg)
835 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
835 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
836
836
837 def _flush_control(self, sock):
837 def _flush_control(self, sock):
838 """Flush replies from the control channel waiting
838 """Flush replies from the control channel waiting
839 in the ZMQ queue.
839 in the ZMQ queue.
840
840
841 Currently: ignore them."""
841 Currently: ignore them."""
842 if self._ignored_control_replies <= 0:
842 if self._ignored_control_replies <= 0:
843 return
843 return
844 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
844 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
845 while msg is not None:
845 while msg is not None:
846 self._ignored_control_replies -= 1
846 self._ignored_control_replies -= 1
847 if self.debug:
847 if self.debug:
848 pprint(msg)
848 pprint(msg)
849 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
849 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
850
850
851 def _flush_ignored_control(self):
851 def _flush_ignored_control(self):
852 """flush ignored control replies"""
852 """flush ignored control replies"""
853 while self._ignored_control_replies > 0:
853 while self._ignored_control_replies > 0:
854 self.session.recv(self._control_socket)
854 self.session.recv(self._control_socket)
855 self._ignored_control_replies -= 1
855 self._ignored_control_replies -= 1
856
856
857 def _flush_ignored_hub_replies(self):
857 def _flush_ignored_hub_replies(self):
858 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
858 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
859 while msg is not None:
859 while msg is not None:
860 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
860 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
861
861
    def _flush_iopub(self, sock):
        """Flush replies from the iopub channel waiting
        in the ZMQ queue.
        """
        idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
        while msg is not None:
            if self.debug:
                pprint(msg)
            parent = msg['parent_header']
            # ignore IOPub messages with no parent.
            # Caused by print statements or warnings from before the first execution.
            if not parent:
                idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
                continue
            msg_id = parent['msg_id']
            content = msg['content']
            header = msg['header']
            msg_type = msg['header']['msg_type']

            # init metadata:
            md = self.metadata[msg_id]

            if msg_type == 'stream':
                name = content['name']
                s = md[name] or ''
                md[name] = s + content['data']
            elif msg_type == 'pyerr':
                md.update({'pyerr' : self._unwrap_exception(content)})
            elif msg_type == 'pyin':
                md.update({'pyin' : content['code']})
            elif msg_type == 'display_data':
                md['outputs'].append(content)
            elif msg_type == 'pyout':
                md['pyout'] = content
            elif msg_type == 'data_message':
                data, remainder = serialize.unserialize_object(msg['buffers'])
                md['data'].update(data)
            elif msg_type == 'status':
                # idle message comes after all outputs
                if content['execution_state'] == 'idle':
                    md['outputs_ready'] = True
            else:
                # unhandled msg_type
                pass

            # redundant?
            self.metadata[msg_id] = md

            idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)

    #--------------------------------------------------------------------------
    # len, getitem
    #--------------------------------------------------------------------------

    def __len__(self):
        """len(client) returns # of engines."""
        return len(self.ids)

    def __getitem__(self, key):
        """index access returns DirectView multiplexer objects

        Must be int, slice, or list/tuple/xrange of ints"""
        if not isinstance(key, (int, slice, tuple, list, xrange)):
            raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
        else:
            return self.direct_view(key)
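
    # Illustrative sketch of index access (assumes a connected Client `rc`):
    #   rc[0]       # DirectView on engine 0
    #   rc[::2]     # DirectView on every other engine
    #   rc[[1, 3]]  # DirectView on engines 1 and 3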

    #--------------------------------------------------------------------------
    # Begin public methods
    #--------------------------------------------------------------------------

    @property
    def ids(self):
        """Always up-to-date ids property."""
        self._flush_notifications()
        # always copy:
        return list(self._ids)

    def activate(self, targets='all', suffix=''):
        """Create a DirectView and register it with IPython magics

        Defines the magics `%px, %autopx, %pxresult, %%px`

        Parameters
        ----------

        targets: int, list of ints, or 'all'
            The engines on which the view's magics will run
        suffix: str [default: '']
            The suffix, if any, for the magics.  This allows you to have
            multiple views associated with parallel magics at the same time.

            e.g. ``rc.activate(targets=0, suffix='0')`` will give you
            the magics ``%px0``, ``%pxresult0``, etc. for running magics just
            on engine 0.
        """
        view = self.direct_view(targets)
        view.block = True
        view.activate(suffix)
        return view
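
    # Illustrative sketch (hypothetical IPython session, assumes Client `rc`):
    #   In [1]: view = rc.activate(targets='all')
    #   In [2]: %px import os          # now runs on every engine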

    def close(self, linger=None):
        """Close my zmq Sockets

        If `linger`, set the zmq LINGER socket option,
        which allows discarding of messages.
        """
        if self._closed:
            return
        self.stop_spin_thread()
        snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
        for name in snames:
            socket = getattr(self, name)
            if socket is not None and not socket.closed:
                if linger is not None:
                    socket.close(linger=linger)
                else:
                    socket.close()
        self._closed = True
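
    # e.g. (sketch, assumes Client `rc`): ``rc.close(linger=0)`` closes the
    # sockets immediately, discarding any messages still queued in zmq.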

    def _spin_every(self, interval=1):
        """target func for use in spin_thread"""
        while True:
            if self._stop_spinning.is_set():
                return
            time.sleep(interval)
            self.spin()

    def spin_thread(self, interval=1):
        """call Client.spin() in a background thread on some regular interval

        This helps ensure that messages don't pile up too much in the zmq queue
        while you are working on other things, or just leaving an idle terminal.

        It also helps limit potential padding of the `received` timestamp
        on AsyncResult objects, used for timings.

        Parameters
        ----------

        interval : float, optional
            The interval on which to spin the client in the background thread
            (simply passed to time.sleep).

        Notes
        -----

        For precision timing, you may want to use this method to put a bound
        on the jitter (in seconds) in `received` timestamps used
        in AsyncResult.wall_time.

        """
        if self._spin_thread is not None:
            self.stop_spin_thread()
        self._stop_spinning.clear()
        self._spin_thread = Thread(target=self._spin_every, args=(interval,))
        self._spin_thread.daemon = True
        self._spin_thread.start()

    def stop_spin_thread(self):
        """stop background spin_thread, if any"""
        if self._spin_thread is not None:
            self._stop_spinning.set()
            self._spin_thread.join()
            self._spin_thread = None
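
    # Usage sketch (assumes a connected Client `rc`): spin frequently while
    # timing tasks, then stop the background thread when done.
    #   rc.spin_thread(interval=0.1)   # bound `received` jitter to ~0.1s
    #   ...                            # submit and time tasks here
    #   rc.stop_spin_thread()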

    def spin(self):
        """Flush any registration notifications and execution results
        waiting in the ZMQ queue.
        """
        if self._notification_socket:
            self._flush_notifications()
        if self._iopub_socket:
            self._flush_iopub(self._iopub_socket)
        if self._mux_socket:
            self._flush_results(self._mux_socket)
        if self._task_socket:
            self._flush_results(self._task_socket)
        if self._control_socket:
            self._flush_control(self._control_socket)
        if self._query_socket:
            self._flush_ignored_hub_replies()
    def wait(self, jobs=None, timeout=-1):
        """waits on one or more `jobs`, for up to `timeout` seconds.

        Parameters
        ----------

        jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
            ints are indices to self.history
            strs are msg_ids
            default: wait on all outstanding messages
        timeout : float
            a time in seconds, after which to give up.
            default is -1, which means no timeout

        Returns
        -------

        True : when all msg_ids are done
        False : timeout reached, some msg_ids still outstanding
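
        Examples
        --------
        An illustrative sketch, assuming a connected Client ``rc`` and
        a view-submitted AsyncResult ``ar``::

            In [10]: ar = rc[:].apply_async(time.sleep, 1)

            In [11]: rc.wait(ar, timeout=5)
            Out[11]: True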
        """
        tic = time.time()
        if jobs is None:
            theids = self.outstanding
        else:
            if isinstance(jobs, (int, basestring, AsyncResult)):
                jobs = [jobs]
            theids = set()
            for job in jobs:
                if isinstance(job, int):
                    # index access
                    job = self.history[job]
                elif isinstance(job, AsyncResult):
                    map(theids.add, job.msg_ids)
                    continue
                theids.add(job)
        if not theids.intersection(self.outstanding):
            return True
        self.spin()
        while theids.intersection(self.outstanding):
            if timeout >= 0 and ( time.time()-tic ) > timeout:
                break
            time.sleep(1e-3)
            self.spin()
        return len(theids.intersection(self.outstanding)) == 0

    #--------------------------------------------------------------------------
    # Control methods
    #--------------------------------------------------------------------------

    @spin_first
    def clear(self, targets=None, block=None):
        """Clear the namespace in target(s)."""
        block = self.block if block is None else block
        targets = self._build_targets(targets)[0]
        for t in targets:
            self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
        error = False
        if block:
            self._flush_ignored_control()
            for i in range(len(targets)):
                idents,msg = self.session.recv(self._control_socket,0)
                if self.debug:
                    pprint(msg)
                if msg['content']['status'] != 'ok':
                    error = self._unwrap_exception(msg['content'])
        else:
            self._ignored_control_replies += len(targets)
        if error:
            raise error


    @spin_first
    def abort(self, jobs=None, targets=None, block=None):
        """Abort specific jobs from the execution queues of target(s).

        This is a mechanism to prevent jobs that have already been submitted
        from executing.

        Parameters
        ----------

        jobs : msg_id, list of msg_ids, or AsyncResult
            The jobs to be aborted

            If unspecified/None: abort all outstanding jobs.

        """
        block = self.block if block is None else block
        jobs = jobs if jobs is not None else list(self.outstanding)
        targets = self._build_targets(targets)[0]

        msg_ids = []
        if isinstance(jobs, (basestring,AsyncResult)):
            jobs = [jobs]
        bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
        if bad_ids:
            raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
        for j in jobs:
            if isinstance(j, AsyncResult):
                msg_ids.extend(j.msg_ids)
            else:
                msg_ids.append(j)
        content = dict(msg_ids=msg_ids)
        for t in targets:
            self.session.send(self._control_socket, 'abort_request',
                    content=content, ident=t)
        error = False
        if block:
            self._flush_ignored_control()
            for i in range(len(targets)):
                idents,msg = self.session.recv(self._control_socket,0)
                if self.debug:
                    pprint(msg)
                if msg['content']['status'] != 'ok':
                    error = self._unwrap_exception(msg['content'])
        else:
            self._ignored_control_replies += len(targets)
        if error:
            raise error
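
    # Sketch (assumes Client `rc`): drop everything still queued on engine 0,
    # or abort the jobs behind a particular AsyncResult `ar` (hypothetical):
    #   rc.abort(targets=0)
    #   rc.abort(jobs=ar)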

    @spin_first
    def shutdown(self, targets='all', restart=False, hub=False, block=None):
        """Terminates one or more engine processes, optionally including the hub.

        Parameters
        ----------

        targets: list of ints or 'all' [default: all]
            Which engines to shutdown.
        hub: bool [default: False]
            Whether to include the Hub.  hub=True implies targets='all'.
        block: bool [default: self.block]
            Whether to wait for clean shutdown replies or not.
        restart: bool [default: False]
            NOT IMPLEMENTED
            whether to restart engines after shutting them down.
        """
        from IPython.parallel.error import NoEnginesRegistered
        if restart:
            raise NotImplementedError("Engine restart is not yet implemented")

        block = self.block if block is None else block
        if hub:
            targets = 'all'
        try:
            targets = self._build_targets(targets)[0]
        except NoEnginesRegistered:
            targets = []
        for t in targets:
            self.session.send(self._control_socket, 'shutdown_request',
                        content={'restart':restart},ident=t)
        error = False
        if block or hub:
            self._flush_ignored_control()
            for i in range(len(targets)):
                idents,msg = self.session.recv(self._control_socket, 0)
                if self.debug:
                    pprint(msg)
                if msg['content']['status'] != 'ok':
                    error = self._unwrap_exception(msg['content'])
        else:
            self._ignored_control_replies += len(targets)

        if hub:
            time.sleep(0.25)
            self.session.send(self._query_socket, 'shutdown_request')
            idents,msg = self.session.recv(self._query_socket, 0)
            if self.debug:
                pprint(msg)
            if msg['content']['status'] != 'ok':
                error = self._unwrap_exception(msg['content'])

        if error:
            raise error
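
    # Sketch (assumes Client `rc`): stop engine 0 only, or tear down the
    # whole cluster including the Hub.
    #   rc.shutdown(targets=0)
    #   rc.shutdown(hub=True, block=True)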

    #--------------------------------------------------------------------------
    # Execution related methods
    #--------------------------------------------------------------------------

    def _maybe_raise(self, result):
        """wrapper for maybe raising an exception if apply failed."""
        if isinstance(result, error.RemoteError):
            raise result

        return result

    def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
                            ident=None):
        """construct and send an apply message via a socket.

        This is the principal method with which all engine execution is performed by views.
        """

        if self._closed:
            raise RuntimeError("Client cannot be used after its sockets have been closed")

        # defaults:
        args = args if args is not None else []
        kwargs = kwargs if kwargs is not None else {}
        metadata = metadata if metadata is not None else {}

        # validate arguments
        if not callable(f) and not isinstance(f, Reference):
            raise TypeError("f must be callable, not %s"%type(f))
        if not isinstance(args, (tuple, list)):
            raise TypeError("args must be tuple or list, not %s"%type(args))
        if not isinstance(kwargs, dict):
            raise TypeError("kwargs must be dict, not %s"%type(kwargs))
        if not isinstance(metadata, dict):
            raise TypeError("metadata must be dict, not %s"%type(metadata))

        bufs = serialize.pack_apply_message(f, args, kwargs,
            buffer_threshold=self.session.buffer_threshold,
            item_threshold=self.session.item_threshold,
        )

        msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
                            metadata=metadata, track=track)

        msg_id = msg['header']['msg_id']
        self.outstanding.add(msg_id)
        if ident:
            # possibly routed to a specific engine
            if isinstance(ident, list):
                ident = ident[-1]
            if ident in self._engines.values():
                # save for later, in case of engine death
                self._outstanding_dict[ident].add(msg_id)
        self.history.append(msg_id)
        self.metadata[msg_id]['submitted'] = datetime.now()

        return msg
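
    # Low-level flow, as an illustrative sketch only -- views normally call
    # send_apply_request for you, supplying the socket and engine ident:
    #   msg = rc.send_apply_request(socket, f, args, kwargs, ident=engine_ident)
    #   ar = rc.get_result(msg['header']['msg_id'])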

    def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
        """construct and send an execute request via a socket."""

        if self._closed:
            raise RuntimeError("Client cannot be used after its sockets have been closed")

        # defaults:
        metadata = metadata if metadata is not None else {}

        # validate arguments
        if not isinstance(code, basestring):
            raise TypeError("code must be text, not %s" % type(code))
        if not isinstance(metadata, dict):
            raise TypeError("metadata must be dict, not %s" % type(metadata))

        content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})

        msg = self.session.send(socket, "execute_request", content=content, ident=ident,
                            metadata=metadata)

        msg_id = msg['header']['msg_id']
        self.outstanding.add(msg_id)
        if ident:
            # possibly routed to a specific engine
            if isinstance(ident, list):
                ident = ident[-1]
            if ident in self._engines.values():
                # save for later, in case of engine death
                self._outstanding_dict[ident].add(msg_id)
        self.history.append(msg_id)
        self.metadata[msg_id]['submitted'] = datetime.now()

        return msg

    #--------------------------------------------------------------------------
    # construct a View object
    #--------------------------------------------------------------------------

    def load_balanced_view(self, targets=None):
        """construct a LoadBalancedView object.

        If no arguments are specified, create a LoadBalancedView
        using all engines.

        Parameters
        ----------

        targets: list,slice,int,etc. [default: use all engines]
            The subset of engines across which to load-balance
        """
        if targets == 'all':
            targets = None
        if targets is not None:
            targets = self._build_targets(targets)[1]
        return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
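
    # e.g. (sketch, assumes Client `rc`):
    #   lview = rc.load_balanced_view()        # balance across all engines
    #   ar = lview.apply_async(pow, 2, 10)     # the scheduler picks the engine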

    def direct_view(self, targets='all'):
        """construct a DirectView object.

        If no targets are specified, create a DirectView using all engines.

        rc.direct_view('all') is distinguished from rc[:] in that 'all' will
        evaluate the target engines at each execution, whereas rc[:] will connect to
        all *current* engines, and that list will not change.

        That is, 'all' will always use all engines, whereas rc[:] will not use
        engines added after the DirectView is constructed.

        Parameters
        ----------

        targets: list,slice,int,etc. [default: use all engines]
            The engines to use for the View
        """
        single = isinstance(targets, int)
        # allow 'all' to be lazily evaluated at each execution
        if targets != 'all':
            targets = self._build_targets(targets)[1]
            if single:
                targets = targets[0]
        return DirectView(client=self, socket=self._mux_socket, targets=targets)
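
    # e.g. (sketch, assumes Client `rc`):
    #   dv = rc.direct_view('all')   # tracks engines added later
    #   dv = rc[:]                   # fixed to the engines present right now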

    #--------------------------------------------------------------------------
    # Query methods
    #--------------------------------------------------------------------------

    @spin_first
    def get_result(self, indices_or_msg_ids=None, block=None):
        """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.

        If the client already has the results, no request to the Hub will be made.

        This is a convenient way to construct AsyncResult objects, which are wrappers
        that include metadata about execution, and allow for awaiting results that
        were not submitted by this Client.

        It can also be a convenient way to retrieve the metadata associated with
        blocking execution, since it always retrieves the metadata as well.

        Examples
        --------
        ::

            In [10]: ar = client.get_result(msg_id)

            In [11]: ar.get()

        Parameters
        ----------

        indices_or_msg_ids : integer history index, str msg_id, or list of either
            The indices or msg_ids of the results to be retrieved

        block : bool
            Whether to wait for the result to be done

        Returns
        -------

        AsyncResult
            A single AsyncResult object will always be returned.

        AsyncHubResult
            A subclass of AsyncResult that retrieves results from the Hub

        """
        block = self.block if block is None else block
        if indices_or_msg_ids is None:
            indices_or_msg_ids = -1

        single_result = False
        if not isinstance(indices_or_msg_ids, (list,tuple)):
            indices_or_msg_ids = [indices_or_msg_ids]
            single_result = True

        theids = []
        for id in indices_or_msg_ids:
            if isinstance(id, int):
                id = self.history[id]
            if not isinstance(id, basestring):
                raise TypeError("indices must be str or int, not %r"%id)
            theids.append(id)

        local_ids = filter(lambda msg_id: msg_id in self.outstanding or msg_id in self.results, theids)
        remote_ids = filter(lambda msg_id: msg_id not in local_ids, theids)

        # given a single msg_id initially, get_result should get the result itself,
        # not a length-one list
        if single_result:
            theids = theids[0]

        if remote_ids:
            ar = AsyncHubResult(self, msg_ids=theids)
        else:
            ar = AsyncResult(self, msg_ids=theids)

        if block:
            ar.wait()

        return ar

    @spin_first
    def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
        """Resubmit one or more tasks.

        In-flight tasks may not be resubmitted.

        Parameters
        ----------

        indices_or_msg_ids : integer history index, str msg_id, or list of either
            The indices or msg_ids of the tasks to be resubmitted

        block : bool
            Whether to wait for the result to be done

        Returns
        -------

        AsyncHubResult
            A subclass of AsyncResult that retrieves results from the Hub

        """
        block = self.block if block is None else block
        if indices_or_msg_ids is None:
            indices_or_msg_ids = -1

        if not isinstance(indices_or_msg_ids, (list,tuple)):
            indices_or_msg_ids = [indices_or_msg_ids]

        theids = []
        for id in indices_or_msg_ids:
            if isinstance(id, int):
                id = self.history[id]
            if not isinstance(id, basestring):
                raise TypeError("indices must be str or int, not %r"%id)
            theids.append(id)

        content = dict(msg_ids = theids)

        self.session.send(self._query_socket, 'resubmit_request', content)

        zmq.select([self._query_socket], [], [])
        idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
        if self.debug:
            pprint(msg)
        content = msg['content']
        if content['status'] != 'ok':
            raise self._unwrap_exception(content)
        mapping = content['resubmitted']
        new_ids = [ mapping[msg_id] for msg_id in theids ]

        ar = AsyncHubResult(self, msg_ids=new_ids)

        if block:
            ar.wait()

        return ar
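
    # Sketch (assumes Client `rc`): rerun the most recent task and wait
    # for the fresh result via the returned AsyncHubResult.
    #   ar = rc.resubmit(-1, block=True)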

    @spin_first
    def result_status(self, msg_ids, status_only=True):
        """Check on the status of the result(s) of the apply request with `msg_ids`.

        If status_only is False, then the actual results will be retrieved, else
        only the status of the results will be checked.

        Parameters
        ----------

        msg_ids : list of msg_ids
            if int:
                Passed as index to self.history for convenience.
        status_only : bool (default: True)
            if False:
                Retrieve the actual results of completed tasks.

        Returns
        -------

        results : dict
            There will always be the keys 'pending' and 'completed', which will
            be lists of msg_ids that are incomplete or complete. If `status_only`
            is False, then completed results will be keyed by their `msg_id`.
        """
        if not isinstance(msg_ids, (list,tuple)):
            msg_ids = [msg_ids]

        theids = []
        for msg_id in msg_ids:
            if isinstance(msg_id, int):
                msg_id = self.history[msg_id]
            if not isinstance(msg_id, basestring):
                raise TypeError("msg_ids must be str, not %r"%msg_id)
            theids.append(msg_id)

        completed = []
        local_results = {}

        # comment this block out to temporarily disable local shortcut:
        # (iterate over a copy, since we remove from `theids` as we go)
        for msg_id in list(theids):
            if msg_id in self.results:
                completed.append(msg_id)
                local_results[msg_id] = self.results[msg_id]
                theids.remove(msg_id)

        if theids: # some not locally cached
            content = dict(msg_ids=theids, status_only=status_only)
            msg = self.session.send(self._query_socket, "result_request", content=content)
            zmq.select([self._query_socket], [], [])
            idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
            if self.debug:
                pprint(msg)
            content = msg['content']
            if content['status'] != 'ok':
                raise self._unwrap_exception(content)
            buffers = msg['buffers']
        else:
            content = dict(completed=[],pending=[])

        content['completed'].extend(completed)

        if status_only:
            return content

        failures = []
        # load cached results into result:
        content.update(local_results)

        # update cache with results:
        for msg_id in sorted(theids):
            if msg_id in content['completed']:
                rec = content[msg_id]
                parent = rec['header']
                header = rec['result_header']
                rcontent = rec['result_content']
                iodict = rec['io']
                if isinstance(rcontent, str):
                    rcontent = self.session.unpack(rcontent)

                md = self.metadata[msg_id]
                md_msg = dict(
                    content=rcontent,
                    parent_header=parent,
                    header=header,
                    metadata=rec['result_metadata'],
                )
                md.update(self._extract_metadata(md_msg))
                if rec.get('received'):
                    md['received'] = rec['received']
                md.update(iodict)

                if rcontent['status'] == 'ok':
                    if header['msg_type'] == 'apply_reply':
                        res,buffers = serialize.unserialize_object(buffers)
                    elif header['msg_type'] == 'execute_reply':
                        res = ExecuteReply(msg_id, rcontent, md)
                    else:
                        raise KeyError("unhandled msg type: %r" % header['msg_type'])
                else:
                    res = self._unwrap_exception(rcontent)
                    failures.append(res)

                self.results[msg_id] = res
                content[msg_id] = res

        if len(theids) == 1 and failures:
            raise failures[0]

        error.collect_exceptions(failures, "result_status")
        return content

    @spin_first
    def queue_status(self, targets='all', verbose=False):
        """Fetch the status of engine queues.

        Parameters
        ----------

        targets : int/str/list of ints/strs
            the engines whose states are to be queried.
            default : all
        verbose : bool
            Whether to return lengths only, or lists of ids for each element
        """
        if targets == 'all':
            # allow 'all' to be evaluated on the engine
            engine_ids = None
        else:
            engine_ids = self._build_targets(targets)[1]
        content = dict(targets=engine_ids, verbose=verbose)
        self.session.send(self._query_socket, "queue_request", content=content)
        idents,msg = self.session.recv(self._query_socket, 0)
        if self.debug:
            pprint(msg)
        content = msg['content']
        status = content.pop('status')
        if status != 'ok':
            raise self._unwrap_exception(content)
        content = rekey(content)
        if isinstance(targets, int):
            return content[targets]
        else:
            return content
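
    # Sketch of a typical reply (illustrative values; assumes Client `rc`):
    #   rc.queue_status()
    #   -> {0: {'queue': 0, 'completed': 10, 'tasks': 1}, 'unassigned': 0}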

    def _build_msgids_from_target(self, targets=None):
        """Build a list of msg_ids from the list of engine targets"""
        if not targets: # needed as _build_targets otherwise uses all engines
            return []
        target_ids = self._build_targets(targets)[0]
        return filter(lambda md_id: self.metadata[md_id]["engine_uuid"] in target_ids, self.metadata)

    def _build_msgids_from_jobs(self, jobs=None):
        """Build a list of msg_ids from "jobs" """
        if not jobs:
            return []
        msg_ids = []
        if isinstance(jobs, (basestring,AsyncResult)):
            jobs = [jobs]
        bad_ids = filter(lambda obj: not isinstance(obj, (basestring, AsyncResult)), jobs)
        if bad_ids:
            raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
        for j in jobs:
            if isinstance(j, AsyncResult):
                msg_ids.extend(j.msg_ids)
            else:
                msg_ids.append(j)
        return msg_ids

    def purge_local_results(self, jobs=[], targets=[]):
        """Clears the client caches of results and frees such memory.

        Individual results can be purged by msg_id, or the entire
        history of specific targets can be purged.

        Use `purge_local_results('all')` to scrub everything from the Client's db.

        The client must have no outstanding tasks before purging the caches.
        Raises `AssertionError` if there are still outstanding tasks.

        After this call all `AsyncResults` are invalid and should be discarded.

        If you must "reget" the results, you can still do so by using
        `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
        redownload the results from the hub if they are still available
        (i.e. `client.purge_hub_results(...)` has not been called).

        Parameters
        ----------

        jobs : str or list of str or AsyncResult objects
            the msg_ids whose results should be purged.
        targets : int/str/list of ints/strs
            The targets, by int_id, whose entire results are to be purged.

        default : None
        """
        assert not self.outstanding, "Can't purge a client with outstanding tasks!"

        if not targets and not jobs:
            raise ValueError("Must specify at least one of `targets` and `jobs`")

        if jobs == 'all':
            self.results.clear()
            self.metadata.clear()
            return
        else:
            msg_ids = []
            msg_ids.extend(self._build_msgids_from_target(targets))
            msg_ids.extend(self._build_msgids_from_jobs(jobs))
            map(self.results.pop, msg_ids)
            map(self.metadata.pop, msg_ids)


    @spin_first
    def purge_hub_results(self, jobs=[], targets=[]):
        """Tell the Hub to forget results.

        Individual results can be purged by msg_id, or the entire
        history of specific targets can be purged.

        Use `purge_results('all')` to scrub everything from the Hub's db.

        Parameters
        ----------

        jobs : str or list of str or AsyncResult objects
            the msg_ids whose results should be forgotten.
        targets : int/str/list of ints/strs
            The targets, by int_id, whose entire history is to be purged.

        default : None
        """
        if not targets and not jobs:
            raise ValueError("Must specify at least one of `targets` and `jobs`")
        if targets:
            targets = self._build_targets(targets)[1]

        # construct msg_ids from jobs
        if jobs == 'all':
            msg_ids = jobs
        else:
            msg_ids = self._build_msgids_from_jobs(jobs)

        content = dict(engine_ids=targets, msg_ids=msg_ids)
        self.session.send(self._query_socket, "purge_request", content=content)
        idents, msg = self.session.recv(self._query_socket, 0)
        if self.debug:
            pprint(msg)
        content = msg['content']
        if content['status'] != 'ok':
            raise self._unwrap_exception(content)

    def purge_results(self, jobs=[], targets=[]):
        """Clears the cached results from both the hub and the local client

        Individual results can be purged by msg_id, or the entire
        history of specific targets can be purged.

        Use `purge_results('all')` to scrub every cached result from both the Hub's and
        the Client's db.

        Equivalent to calling both `purge_hub_results()` and `purge_local_results()` with
        the same arguments.

        Parameters
        ----------

        jobs : str or list of str or AsyncResult objects
            the msg_ids whose results should be forgotten.
        targets : int/str/list of ints/strs
            The targets, by int_id, whose entire history is to be purged.

        default : None
        """
        self.purge_local_results(jobs=jobs, targets=targets)
        self.purge_hub_results(jobs=jobs, targets=targets)
1775
1775
1776 def purge_everything(self):
1776 def purge_everything(self):
1777 """Clears all content from previous Tasks from both the hub and the local client
1777 """Clears all content from previous Tasks from both the hub and the local client
1778
1778
1779 In addition to calling `purge_results("all")` it also deletes the history and
1779 In addition to calling `purge_results("all")` it also deletes the history and
1780 other bookkeeping lists.
1780 other bookkeeping lists.
1781 """
1781 """
1782 self.purge_results("all")
1782 self.purge_results("all")
1783 self.history = []
1783 self.history = []
1784 self.session.digest_history.clear()
1784 self.session.digest_history.clear()
1785
1785
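For long-running sessions where bookkeeping growth matters, a sketch of a periodic full reset (same illustrative `rc` client as above):

    rc.purge_everything()    # results, history, and digest state all cleared
    assert rc.history == []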
1786 @spin_first
1786 @spin_first
1787 def hub_history(self):
1787 def hub_history(self):
1788 """Get the Hub's history
1788 """Get the Hub's history
1789
1789
1790 Just like the Client, the Hub has a history, which is a list of msg_ids.
1790 Just like the Client, the Hub has a history, which is a list of msg_ids.
1791 This will contain the history of all clients, and, depending on configuration,
1791 This will contain the history of all clients, and, depending on configuration,
1792 may contain history across multiple cluster sessions.
1792 may contain history across multiple cluster sessions.
1793
1793
1794 Any msg_id returned here is a valid argument to `get_result`.
1794 Any msg_id returned here is a valid argument to `get_result`.
1795
1795
1796 Returns
1796 Returns
1797 -------
1797 -------
1798
1798
1799 msg_ids : list of strs
1799 msg_ids : list of strs
1800 list of all msg_ids, ordered by task submission time.
1800 list of all msg_ids, ordered by task submission time.
1801 """
1801 """
1802
1802
1803 self.session.send(self._query_socket, "history_request", content={})
1803 self.session.send(self._query_socket, "history_request", content={})
1804 idents, msg = self.session.recv(self._query_socket, 0)
1804 idents, msg = self.session.recv(self._query_socket, 0)
1805
1805
1806 if self.debug:
1806 if self.debug:
1807 pprint(msg)
1807 pprint(msg)
1808 content = msg['content']
1808 content = msg['content']
1809 if content['status'] != 'ok':
1809 if content['status'] != 'ok':
1810 raise self._unwrap_exception(content)
1810 raise self._unwrap_exception(content)
1811 else:
1811 else:
1812 return content['history']
1812 return content['history']
1813
1813
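A sketch of walking the Hub's history back into results (again assuming an illustrative connected client `rc`, and a Hub whose DB backend retains records):

    hist = rc.hub_history()
    if hist:
        ar = rc.get_result(hist[-1])   # any Hub msg_id is a valid argument
        print(ar.get())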
1814 @spin_first
1814 @spin_first
1815 def db_query(self, query, keys=None):
1815 def db_query(self, query, keys=None):
1816 """Query the Hub's TaskRecord database
1816 """Query the Hub's TaskRecord database
1817
1817
1818 This will return a list of task record dicts that match `query`
1818 This will return a list of task record dicts that match `query`
1819
1819
1820 Parameters
1820 Parameters
1821 ----------
1821 ----------
1822
1822
1823 query : mongodb query dict
1823 query : mongodb query dict
1824 The search dict. See mongodb query docs for details.
1824 The search dict. See mongodb query docs for details.
1825 keys : list of strs [optional]
1825 keys : list of strs [optional]
1826 The subset of keys to be returned. The default is to fetch everything but buffers.
1826 The subset of keys to be returned. The default is to fetch everything but buffers.
1827 'msg_id' will *always* be included.
1827 'msg_id' will *always* be included.
1828 """
1828 """
1829 if isinstance(keys, basestring):
1829 if isinstance(keys, basestring):
1830 keys = [keys]
1830 keys = [keys]
1831 content = dict(query=query, keys=keys)
1831 content = dict(query=query, keys=keys)
1832 self.session.send(self._query_socket, "db_request", content=content)
1832 self.session.send(self._query_socket, "db_request", content=content)
1833 idents, msg = self.session.recv(self._query_socket, 0)
1833 idents, msg = self.session.recv(self._query_socket, 0)
1834 if self.debug:
1834 if self.debug:
1835 pprint(msg)
1835 pprint(msg)
1836 content = msg['content']
1836 content = msg['content']
1837 if content['status'] != 'ok':
1837 if content['status'] != 'ok':
1838 raise self._unwrap_exception(content)
1838 raise self._unwrap_exception(content)
1839
1839
1840 records = content['records']
1840 records = content['records']
1841
1841
1842 buffer_lens = content['buffer_lens']
1842 buffer_lens = content['buffer_lens']
1843 result_buffer_lens = content['result_buffer_lens']
1843 result_buffer_lens = content['result_buffer_lens']
1844 buffers = msg['buffers']
1844 buffers = msg['buffers']
1845 has_bufs = buffer_lens is not None
1845 has_bufs = buffer_lens is not None
1846 has_rbufs = result_buffer_lens is not None
1846 has_rbufs = result_buffer_lens is not None
1847 for i,rec in enumerate(records):
1847 for i,rec in enumerate(records):
1848 # relink buffers
1848 # relink buffers
1849 if has_bufs:
1849 if has_bufs:
1850 blen = buffer_lens[i]
1850 blen = buffer_lens[i]
1851 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1851 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1852 if has_rbufs:
1852 if has_rbufs:
1853 blen = result_buffer_lens[i]
1853 blen = result_buffer_lens[i]
1854 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1854 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1855
1855
1856 return records
1856 return records
1857
1857
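A sketch of a typical query (assumes the illustrative client `rc` and a Hub configured with a real backend such as SQLiteDB or MongoDB; the default NoDB backend refuses these requests):

    from datetime import datetime, timedelta

    cutoff = datetime.now() - timedelta(hours=1)
    recent = rc.db_query(
        {'submitted': {'$gt': cutoff}},               # mongodb-style operator
        keys=['msg_id', 'completed', 'engine_uuid'],  # 'msg_id' always included
    )
    print("%i tasks submitted in the last hour" % len(recent))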
1858 __all__ = [ 'Client' ]
1858 __all__ = [ 'Client' ]
@@ -1,1417 +1,1422 @@
1 """The IPython Controller Hub with 0MQ
1 """The IPython Controller Hub with 0MQ
2 This is the master object that handles connections from engines and clients,
2 This is the master object that handles connections from engines and clients,
3 and monitors traffic through the various queues.
3 and monitors traffic through the various queues.
4
4
5 Authors:
5 Authors:
6
6
7 * Min RK
7 * Min RK
8 """
8 """
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10 # Copyright (C) 2010-2011 The IPython Development Team
10 # Copyright (C) 2010-2011 The IPython Development Team
11 #
11 #
12 # Distributed under the terms of the BSD License. The full license is in
12 # Distributed under the terms of the BSD License. The full license is in
13 # the file COPYING, distributed as part of this software.
13 # the file COPYING, distributed as part of this software.
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15
15
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17 # Imports
17 # Imports
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 from __future__ import print_function
19 from __future__ import print_function
20
20
21 import json
21 import json
22 import os
22 import os
23 import sys
23 import sys
24 import time
24 import time
25 from datetime import datetime
25 from datetime import datetime
26
26
27 import zmq
27 import zmq
28 from zmq.eventloop import ioloop
28 from zmq.eventloop import ioloop
29 from zmq.eventloop.zmqstream import ZMQStream
29 from zmq.eventloop.zmqstream import ZMQStream
30
30
31 # internal:
31 # internal:
32 from IPython.utils.importstring import import_item
32 from IPython.utils.importstring import import_item
33 from IPython.utils.localinterfaces import LOCALHOST
33 from IPython.utils.localinterfaces import localhost
34 from IPython.utils.py3compat import cast_bytes
34 from IPython.utils.py3compat import cast_bytes
35 from IPython.utils.traitlets import (
35 from IPython.utils.traitlets import (
36 HasTraits, Instance, Integer, Unicode, Dict, Set, Tuple, CBytes, DottedObjectName
36 HasTraits, Instance, Integer, Unicode, Dict, Set, Tuple, CBytes, DottedObjectName
37 )
37 )
38
38
39 from IPython.parallel import error, util
39 from IPython.parallel import error, util
40 from IPython.parallel.factory import RegistrationFactory
40 from IPython.parallel.factory import RegistrationFactory
41
41
42 from IPython.kernel.zmq.session import SessionFactory
42 from IPython.kernel.zmq.session import SessionFactory
43
43
44 from .heartmonitor import HeartMonitor
44 from .heartmonitor import HeartMonitor
45
45
46 #-----------------------------------------------------------------------------
46 #-----------------------------------------------------------------------------
47 # Code
47 # Code
48 #-----------------------------------------------------------------------------
48 #-----------------------------------------------------------------------------
49
49
50 def _passer(*args, **kwargs):
50 def _passer(*args, **kwargs):
51 return
51 return
52
52
53 def _printer(*args, **kwargs):
53 def _printer(*args, **kwargs):
54 print (args)
54 print (args)
55 print (kwargs)
55 print (kwargs)
56
56
57 def empty_record():
57 def empty_record():
58 """Return an empty dict with all record keys."""
58 """Return an empty dict with all record keys."""
59 return {
59 return {
60 'msg_id' : None,
60 'msg_id' : None,
61 'header' : None,
61 'header' : None,
62 'metadata' : None,
62 'metadata' : None,
63 'content': None,
63 'content': None,
64 'buffers': None,
64 'buffers': None,
65 'submitted': None,
65 'submitted': None,
66 'client_uuid' : None,
66 'client_uuid' : None,
67 'engine_uuid' : None,
67 'engine_uuid' : None,
68 'started': None,
68 'started': None,
69 'completed': None,
69 'completed': None,
70 'resubmitted': None,
70 'resubmitted': None,
71 'received': None,
71 'received': None,
72 'result_header' : None,
72 'result_header' : None,
73 'result_metadata' : None,
73 'result_metadata' : None,
74 'result_content' : None,
74 'result_content' : None,
75 'result_buffers' : None,
75 'result_buffers' : None,
76 'queue' : None,
76 'queue' : None,
77 'pyin' : None,
77 'pyin' : None,
78 'pyout': None,
78 'pyout': None,
79 'pyerr': None,
79 'pyerr': None,
80 'stdout': '',
80 'stdout': '',
81 'stderr': '',
81 'stderr': '',
82 }
82 }
83
83
84 def init_record(msg):
84 def init_record(msg):
85 """Initialize a TaskRecord based on a request."""
85 """Initialize a TaskRecord based on a request."""
86 header = msg['header']
86 header = msg['header']
87 return {
87 return {
88 'msg_id' : header['msg_id'],
88 'msg_id' : header['msg_id'],
89 'header' : header,
89 'header' : header,
90 'content': msg['content'],
90 'content': msg['content'],
91 'metadata': msg['metadata'],
91 'metadata': msg['metadata'],
92 'buffers': msg['buffers'],
92 'buffers': msg['buffers'],
93 'submitted': header['date'],
93 'submitted': header['date'],
94 'client_uuid' : None,
94 'client_uuid' : None,
95 'engine_uuid' : None,
95 'engine_uuid' : None,
96 'started': None,
96 'started': None,
97 'completed': None,
97 'completed': None,
98 'resubmitted': None,
98 'resubmitted': None,
99 'received': None,
99 'received': None,
100 'result_header' : None,
100 'result_header' : None,
101 'result_metadata': None,
101 'result_metadata': None,
102 'result_content' : None,
102 'result_content' : None,
103 'result_buffers' : None,
103 'result_buffers' : None,
104 'queue' : None,
104 'queue' : None,
105 'pyin' : None,
105 'pyin' : None,
106 'pyout': None,
106 'pyout': None,
107 'pyerr': None,
107 'pyerr': None,
108 'stdout': '',
108 'stdout': '',
109 'stderr': '',
109 'stderr': '',
110 }
110 }
111
111
112
112
113 class EngineConnector(HasTraits):
113 class EngineConnector(HasTraits):
114 """A simple object for accessing the various zmq connections of an object.
114 """A simple object for accessing the various zmq connections of an object.
115 Attributes are:
115 Attributes are:
116 id (int): engine ID
116 id (int): engine ID
117 uuid (unicode): engine UUID
117 uuid (unicode): engine UUID
118 pending: set of msg_ids
118 pending: set of msg_ids
119 stallback: DelayedCallback for stalled registration
119 stallback: DelayedCallback for stalled registration
120 """
120 """
121
121
122 id = Integer(0)
122 id = Integer(0)
123 uuid = Unicode()
123 uuid = Unicode()
124 pending = Set()
124 pending = Set()
125 stallback = Instance(ioloop.DelayedCallback)
125 stallback = Instance(ioloop.DelayedCallback)
126
126
127
127
128 _db_shortcuts = {
128 _db_shortcuts = {
129 'sqlitedb' : 'IPython.parallel.controller.sqlitedb.SQLiteDB',
129 'sqlitedb' : 'IPython.parallel.controller.sqlitedb.SQLiteDB',
130 'mongodb' : 'IPython.parallel.controller.mongodb.MongoDB',
130 'mongodb' : 'IPython.parallel.controller.mongodb.MongoDB',
131 'dictdb' : 'IPython.parallel.controller.dictdb.DictDB',
131 'dictdb' : 'IPython.parallel.controller.dictdb.DictDB',
132 'nodb' : 'IPython.parallel.controller.dictdb.NoDB',
132 'nodb' : 'IPython.parallel.controller.dictdb.NoDB',
133 }
133 }
134
134
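The shortcut names above expand to full dotted class paths; a sketch of selecting a backend from an ipcontroller_config.py (standard IPython config file; where it lives depends on the profile):

    # ipcontroller_config.py
    c = get_config()
    c.HubFactory.db_class = 'sqlitedb'   # shortcut, resolved via _db_shortcuts
    # or, equivalently, the full dotted name:
    # c.HubFactory.db_class = 'IPython.parallel.controller.sqlitedb.SQLiteDB'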
135 class HubFactory(RegistrationFactory):
135 class HubFactory(RegistrationFactory):
136 """The Configurable for setting up a Hub."""
136 """The Configurable for setting up a Hub."""
137
137
138 # port-pairs for monitoredqueues:
138 # port-pairs for monitoredqueues:
139 hb = Tuple(Integer,Integer,config=True,
139 hb = Tuple(Integer,Integer,config=True,
140 help="""PUB/ROUTER Port pair for Engine heartbeats""")
140 help="""PUB/ROUTER Port pair for Engine heartbeats""")
141 def _hb_default(self):
141 def _hb_default(self):
142 return tuple(util.select_random_ports(2))
142 return tuple(util.select_random_ports(2))
143
143
144 mux = Tuple(Integer,Integer,config=True,
144 mux = Tuple(Integer,Integer,config=True,
145 help="""Client/Engine Port pair for MUX queue""")
145 help="""Client/Engine Port pair for MUX queue""")
146
146
147 def _mux_default(self):
147 def _mux_default(self):
148 return tuple(util.select_random_ports(2))
148 return tuple(util.select_random_ports(2))
149
149
150 task = Tuple(Integer,Integer,config=True,
150 task = Tuple(Integer,Integer,config=True,
151 help="""Client/Engine Port pair for Task queue""")
151 help="""Client/Engine Port pair for Task queue""")
152 def _task_default(self):
152 def _task_default(self):
153 return tuple(util.select_random_ports(2))
153 return tuple(util.select_random_ports(2))
154
154
155 control = Tuple(Integer,Integer,config=True,
155 control = Tuple(Integer,Integer,config=True,
156 help="""Client/Engine Port pair for Control queue""")
156 help="""Client/Engine Port pair for Control queue""")
157
157
158 def _control_default(self):
158 def _control_default(self):
159 return tuple(util.select_random_ports(2))
159 return tuple(util.select_random_ports(2))
160
160
161 iopub = Tuple(Integer,Integer,config=True,
161 iopub = Tuple(Integer,Integer,config=True,
162 help="""Client/Engine Port pair for IOPub relay""")
162 help="""Client/Engine Port pair for IOPub relay""")
163
163
164 def _iopub_default(self):
164 def _iopub_default(self):
165 return tuple(util.select_random_ports(2))
165 return tuple(util.select_random_ports(2))
166
166
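Each port-pair default simply asks the OS for unused ports; the helper in isolation (a sketch, relying only on select_random_ports as it is already used in this module):

    from IPython.parallel import util

    client_port, engine_port = util.select_random_ports(2)  # two free TCP ports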
167 # single ports:
167 # single ports:
168 mon_port = Integer(config=True,
168 mon_port = Integer(config=True,
169 help="""Monitor (SUB) port for queue traffic""")
169 help="""Monitor (SUB) port for queue traffic""")
170
170
171 def _mon_port_default(self):
171 def _mon_port_default(self):
172 return util.select_random_ports(1)[0]
172 return util.select_random_ports(1)[0]
173
173
174 notifier_port = Integer(config=True,
174 notifier_port = Integer(config=True,
175 help="""PUB port for sending engine status notifications""")
175 help="""PUB port for sending engine status notifications""")
176
176
177 def _notifier_port_default(self):
177 def _notifier_port_default(self):
178 return util.select_random_ports(1)[0]
178 return util.select_random_ports(1)[0]
179
179
180 engine_ip = Unicode(LOCALHOST, config=True,
180 engine_ip = Unicode(config=True,
181 help="IP on which to listen for engine connections. [default: loopback]")
181 help="IP on which to listen for engine connections. [default: loopback]")
182 def _engine_ip_default(self):
183 return localhost()
182 engine_transport = Unicode('tcp', config=True,
184 engine_transport = Unicode('tcp', config=True,
183 help="0MQ transport for engine connections. [default: tcp]")
185 help="0MQ transport for engine connections. [default: tcp]")
184
186
185 client_ip = Unicode(LOCALHOST, config=True,
187 client_ip = Unicode(config=True,
186 help="IP on which to listen for client connections. [default: loopback]")
188 help="IP on which to listen for client connections. [default: loopback]")
187 client_transport = Unicode('tcp', config=True,
189 client_transport = Unicode('tcp', config=True,
188 help="0MQ transport for client connections. [default : tcp]")
190 help="0MQ transport for client connections. [default : tcp]")
189
191
190 monitor_ip = Unicode(LOCALHOST, config=True,
192 monitor_ip = Unicode(config=True,
191 help="IP on which to listen for monitor messages. [default: loopback]")
193 help="IP on which to listen for monitor messages. [default: loopback]")
192 monitor_transport = Unicode('tcp', config=True,
194 monitor_transport = Unicode('tcp', config=True,
193 help="0MQ transport for monitor messages. [default : tcp]")
195 help="0MQ transport for monitor messages. [default : tcp]")
194
196
197 _client_ip_default = _monitor_ip_default = _engine_ip_default
198
199
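This hunk carries the substance of the change: the module-level LOCALHOST constant is replaced by a localhost() call inside _engine_ip_default (shared with the client and monitor traits via the assignment above), so the loopback lookup happens lazily at instantiation rather than at import. The traitlets dynamic-default pattern in miniature (illustrative class, not part of the diff):

    from IPython.utils.traitlets import HasTraits, Unicode

    class Example(HasTraits):
        ip = Unicode()
        def _ip_default(self):
            # computed only when an instance first needs the value
            return '127.0.0.1'

    assert Example().ip == '127.0.0.1'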
195 monitor_url = Unicode('')
200 monitor_url = Unicode('')
196
201
197 db_class = DottedObjectName('NoDB',
202 db_class = DottedObjectName('NoDB',
198 config=True, help="""The class to use for the DB backend
203 config=True, help="""The class to use for the DB backend
199
204
200 Options include:
205 Options include:
201
206
202 SQLiteDB: SQLite
207 SQLiteDB: SQLite
203 MongoDB : use MongoDB
208 MongoDB : use MongoDB
204 DictDB : in-memory storage (fastest, but be mindful of memory growth of the Hub)
209 DictDB : in-memory storage (fastest, but be mindful of memory growth of the Hub)
205 NoDB : disable database altogether (default)
210 NoDB : disable database altogether (default)
206
211
207 """)
212 """)
208
213
209 # not configurable
214 # not configurable
210 db = Instance('IPython.parallel.controller.dictdb.BaseDB')
215 db = Instance('IPython.parallel.controller.dictdb.BaseDB')
211 heartmonitor = Instance('IPython.parallel.controller.heartmonitor.HeartMonitor')
216 heartmonitor = Instance('IPython.parallel.controller.heartmonitor.HeartMonitor')
212
217
213 def _ip_changed(self, name, old, new):
218 def _ip_changed(self, name, old, new):
214 self.engine_ip = new
219 self.engine_ip = new
215 self.client_ip = new
220 self.client_ip = new
216 self.monitor_ip = new
221 self.monitor_ip = new
217 self._update_monitor_url()
222 self._update_monitor_url()
218
223
219 def _update_monitor_url(self):
224 def _update_monitor_url(self):
220 self.monitor_url = "%s://%s:%i" % (self.monitor_transport, self.monitor_ip, self.mon_port)
225 self.monitor_url = "%s://%s:%i" % (self.monitor_transport, self.monitor_ip, self.mon_port)
221
226
222 def _transport_changed(self, name, old, new):
227 def _transport_changed(self, name, old, new):
223 self.engine_transport = new
228 self.engine_transport = new
224 self.client_transport = new
229 self.client_transport = new
225 self.monitor_transport = new
230 self.monitor_transport = new
226 self._update_monitor_url()
231 self._update_monitor_url()
227
232
228 def __init__(self, **kwargs):
233 def __init__(self, **kwargs):
229 super(HubFactory, self).__init__(**kwargs)
234 super(HubFactory, self).__init__(**kwargs)
230 self._update_monitor_url()
235 self._update_monitor_url()
231
236
232
237
233 def construct(self):
238 def construct(self):
234 self.init_hub()
239 self.init_hub()
235
240
236 def start(self):
241 def start(self):
237 self.heartmonitor.start()
242 self.heartmonitor.start()
238 self.log.info("Heartmonitor started")
243 self.log.info("Heartmonitor started")
239
244
240 def client_url(self, channel):
245 def client_url(self, channel):
241 """return full zmq url for a named client channel"""
246 """return full zmq url for a named client channel"""
242 return "%s://%s:%i" % (self.client_transport, self.client_ip, self.client_info[channel])
247 return "%s://%s:%i" % (self.client_transport, self.client_ip, self.client_info[channel])
243
248
244 def engine_url(self, channel):
249 def engine_url(self, channel):
245 """return full zmq url for a named engine channel"""
250 """return full zmq url for a named engine channel"""
246 return "%s://%s:%i" % (self.engine_transport, self.engine_ip, self.engine_info[channel])
251 return "%s://%s:%i" % (self.engine_transport, self.engine_ip, self.engine_info[channel])
247
252
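A worked instance of the URL construction shared by both helpers (values are illustrative):

    transport, ip, port = 'tcp', '10.0.0.5', 12345
    url = "%s://%s:%i" % (transport, ip, port)
    assert url == 'tcp://10.0.0.5:12345'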
248 def init_hub(self):
253 def init_hub(self):
249 """construct Hub object"""
254 """construct Hub object"""
250
255
251 ctx = self.context
256 ctx = self.context
252 loop = self.loop
257 loop = self.loop
253
258
254 try:
259 try:
255 scheme = self.config.TaskScheduler.scheme_name
260 scheme = self.config.TaskScheduler.scheme_name
256 except AttributeError:
261 except AttributeError:
257 from .scheduler import TaskScheduler
262 from .scheduler import TaskScheduler
258 scheme = TaskScheduler.scheme_name.get_default_value()
263 scheme = TaskScheduler.scheme_name.get_default_value()
259
264
260 # build connection dicts
265 # build connection dicts
261 engine = self.engine_info = {
266 engine = self.engine_info = {
262 'interface' : "%s://%s" % (self.engine_transport, self.engine_ip),
267 'interface' : "%s://%s" % (self.engine_transport, self.engine_ip),
263 'registration' : self.regport,
268 'registration' : self.regport,
264 'control' : self.control[1],
269 'control' : self.control[1],
265 'mux' : self.mux[1],
270 'mux' : self.mux[1],
266 'hb_ping' : self.hb[0],
271 'hb_ping' : self.hb[0],
267 'hb_pong' : self.hb[1],
272 'hb_pong' : self.hb[1],
268 'task' : self.task[1],
273 'task' : self.task[1],
269 'iopub' : self.iopub[1],
274 'iopub' : self.iopub[1],
270 }
275 }
271
276
272 client = self.client_info = {
277 client = self.client_info = {
273 'interface' : "%s://%s" % (self.client_transport, self.client_ip),
278 'interface' : "%s://%s" % (self.client_transport, self.client_ip),
274 'registration' : self.regport,
279 'registration' : self.regport,
275 'control' : self.control[0],
280 'control' : self.control[0],
276 'mux' : self.mux[0],
281 'mux' : self.mux[0],
277 'task' : self.task[0],
282 'task' : self.task[0],
278 'task_scheme' : scheme,
283 'task_scheme' : scheme,
279 'iopub' : self.iopub[0],
284 'iopub' : self.iopub[0],
280 'notification' : self.notifier_port,
285 'notification' : self.notifier_port,
281 }
286 }
282
287
283 self.log.debug("Hub engine addrs: %s", self.engine_info)
288 self.log.debug("Hub engine addrs: %s", self.engine_info)
284 self.log.debug("Hub client addrs: %s", self.client_info)
289 self.log.debug("Hub client addrs: %s", self.client_info)
285
290
286 # Registrar socket
291 # Registrar socket
287 q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
292 q = ZMQStream(ctx.socket(zmq.ROUTER), loop)
288 util.set_hwm(q, 0)
293 util.set_hwm(q, 0)
289 q.bind(self.client_url('registration'))
294 q.bind(self.client_url('registration'))
290 self.log.info("Hub listening on %s for registration.", self.client_url('registration'))
295 self.log.info("Hub listening on %s for registration.", self.client_url('registration'))
291 if self.client_ip != self.engine_ip:
296 if self.client_ip != self.engine_ip:
292 q.bind(self.engine_url('registration'))
297 q.bind(self.engine_url('registration'))
293 self.log.info("Hub listening on %s for registration.", self.engine_url('registration'))
298 self.log.info("Hub listening on %s for registration.", self.engine_url('registration'))
294
299
295 ### Engine connections ###
300 ### Engine connections ###
296
301
297 # heartbeat
302 # heartbeat
298 hpub = ctx.socket(zmq.PUB)
303 hpub = ctx.socket(zmq.PUB)
299 hpub.bind(self.engine_url('hb_ping'))
304 hpub.bind(self.engine_url('hb_ping'))
300 hrep = ctx.socket(zmq.ROUTER)
305 hrep = ctx.socket(zmq.ROUTER)
301 util.set_hwm(hrep, 0)
306 util.set_hwm(hrep, 0)
302 hrep.bind(self.engine_url('hb_pong'))
307 hrep.bind(self.engine_url('hb_pong'))
303 self.heartmonitor = HeartMonitor(loop=loop, parent=self, log=self.log,
308 self.heartmonitor = HeartMonitor(loop=loop, parent=self, log=self.log,
304 pingstream=ZMQStream(hpub,loop),
309 pingstream=ZMQStream(hpub,loop),
305 pongstream=ZMQStream(hrep,loop)
310 pongstream=ZMQStream(hrep,loop)
306 )
311 )
307
312
308 ### Client connections ###
313 ### Client connections ###
309
314
310 # Notifier socket
315 # Notifier socket
311 n = ZMQStream(ctx.socket(zmq.PUB), loop)
316 n = ZMQStream(ctx.socket(zmq.PUB), loop)
312 n.bind(self.client_url('notification'))
317 n.bind(self.client_url('notification'))
313
318
314 ### build and launch the queues ###
319 ### build and launch the queues ###
315
320
316 # monitor socket
321 # monitor socket
317 sub = ctx.socket(zmq.SUB)
322 sub = ctx.socket(zmq.SUB)
318 sub.setsockopt(zmq.SUBSCRIBE, b"")
323 sub.setsockopt(zmq.SUBSCRIBE, b"")
319 sub.bind(self.monitor_url)
324 sub.bind(self.monitor_url)
320 sub.bind('inproc://monitor')
325 sub.bind('inproc://monitor')
321 sub = ZMQStream(sub, loop)
326 sub = ZMQStream(sub, loop)
322
327
323 # connect the db
328 # connect the db
324 db_class = _db_shortcuts.get(self.db_class.lower(), self.db_class)
329 db_class = _db_shortcuts.get(self.db_class.lower(), self.db_class)
325 self.log.info('Hub using DB backend: %r', (db_class.split('.')[-1]))
330 self.log.info('Hub using DB backend: %r', (db_class.split('.')[-1]))
326 self.db = import_item(str(db_class))(session=self.session.session,
331 self.db = import_item(str(db_class))(session=self.session.session,
327 parent=self, log=self.log)
332 parent=self, log=self.log)
328 time.sleep(.25)
333 time.sleep(.25)
329
334
330 # resubmit stream
335 # resubmit stream
331 r = ZMQStream(ctx.socket(zmq.DEALER), loop)
336 r = ZMQStream(ctx.socket(zmq.DEALER), loop)
332 url = util.disambiguate_url(self.client_url('task'))
337 url = util.disambiguate_url(self.client_url('task'))
333 r.connect(url)
338 r.connect(url)
334
339
335 self.hub = Hub(loop=loop, session=self.session, monitor=sub, heartmonitor=self.heartmonitor,
340 self.hub = Hub(loop=loop, session=self.session, monitor=sub, heartmonitor=self.heartmonitor,
336 query=q, notifier=n, resubmit=r, db=self.db,
341 query=q, notifier=n, resubmit=r, db=self.db,
337 engine_info=self.engine_info, client_info=self.client_info,
342 engine_info=self.engine_info, client_info=self.client_info,
338 log=self.log)
343 log=self.log)
339
344
340
345
341 class Hub(SessionFactory):
346 class Hub(SessionFactory):
342 """The IPython Controller Hub with 0MQ connections
347 """The IPython Controller Hub with 0MQ connections
343
348
344 Parameters
349 Parameters
345 ==========
350 ==========
346 loop: zmq IOLoop instance
351 loop: zmq IOLoop instance
347 session: Session object
352 session: Session object
348 <removed> context: zmq context for creating new connections (?)
353 <removed> context: zmq context for creating new connections (?)
349 queue: ZMQStream for monitoring the command queue (SUB)
354 queue: ZMQStream for monitoring the command queue (SUB)
350 query: ZMQStream for engine registration and client query requests (ROUTER)
355 query: ZMQStream for engine registration and client query requests (ROUTER)
351 heartbeat: HeartMonitor object checking the pulse of the engines
356 heartbeat: HeartMonitor object checking the pulse of the engines
352 notifier: ZMQStream for broadcasting engine registration changes (PUB)
357 notifier: ZMQStream for broadcasting engine registration changes (PUB)
353 db: connection to db for out of memory logging of commands
358 db: connection to db for out of memory logging of commands
354 NotImplemented
359 NotImplemented
355 engine_info: dict of zmq connection information for engines to connect
360 engine_info: dict of zmq connection information for engines to connect
356 to the queues.
361 to the queues.
357 client_info: dict of zmq connection information for clients to connect
362 client_info: dict of zmq connection information for clients to connect
358 to the queues.
363 to the queues.
359 """
364 """
360
365
361 engine_state_file = Unicode()
366 engine_state_file = Unicode()
362
367
363 # internal data structures:
368 # internal data structures:
364 ids=Set() # engine IDs
369 ids=Set() # engine IDs
365 keytable=Dict()
370 keytable=Dict()
366 by_ident=Dict()
371 by_ident=Dict()
367 engines=Dict()
372 engines=Dict()
368 clients=Dict()
373 clients=Dict()
369 hearts=Dict()
374 hearts=Dict()
370 pending=Set()
375 pending=Set()
371 queues=Dict() # pending msg_ids keyed by engine_id
376 queues=Dict() # pending msg_ids keyed by engine_id
372 tasks=Dict() # pending msg_ids submitted as tasks, keyed by engine_id
377 tasks=Dict() # pending msg_ids submitted as tasks, keyed by engine_id
373 completed=Dict() # completed msg_ids keyed by engine_id
378 completed=Dict() # completed msg_ids keyed by engine_id
374 all_completed=Set() # set of all completed msg_ids
379 all_completed=Set() # set of all completed msg_ids
375 dead_engines=Set() # set of uuids of engines that have died
380 dead_engines=Set() # set of uuids of engines that have died
376 unassigned=Set() # set of task msg_ids not yet assigned a destination
381 unassigned=Set() # set of task msg_ids not yet assigned a destination
377 incoming_registrations=Dict()
382 incoming_registrations=Dict()
378 registration_timeout=Integer()
383 registration_timeout=Integer()
379 _idcounter=Integer(0)
384 _idcounter=Integer(0)
380
385
381 # objects from constructor:
386 # objects from constructor:
382 query=Instance(ZMQStream)
387 query=Instance(ZMQStream)
383 monitor=Instance(ZMQStream)
388 monitor=Instance(ZMQStream)
384 notifier=Instance(ZMQStream)
389 notifier=Instance(ZMQStream)
385 resubmit=Instance(ZMQStream)
390 resubmit=Instance(ZMQStream)
386 heartmonitor=Instance(HeartMonitor)
391 heartmonitor=Instance(HeartMonitor)
387 db=Instance(object)
392 db=Instance(object)
388 client_info=Dict()
393 client_info=Dict()
389 engine_info=Dict()
394 engine_info=Dict()
390
395
391
396
392 def __init__(self, **kwargs):
397 def __init__(self, **kwargs):
393 """
398 """
394 # universal:
399 # universal:
395 loop: IOLoop for creating future connections
400 loop: IOLoop for creating future connections
396 session: streamsession for sending serialized data
401 session: streamsession for sending serialized data
397 # engine:
402 # engine:
398 queue: ZMQStream for monitoring queue messages
403 queue: ZMQStream for monitoring queue messages
399 query: ZMQStream for engine+client registration and client requests
404 query: ZMQStream for engine+client registration and client requests
400 heartbeat: HeartMonitor object for tracking engines
405 heartbeat: HeartMonitor object for tracking engines
401 # extra:
406 # extra:
402 db: ZMQStream for db connection (NotImplemented)
407 db: ZMQStream for db connection (NotImplemented)
403 engine_info: zmq address/protocol dict for engine connections
408 engine_info: zmq address/protocol dict for engine connections
404 client_info: zmq address/protocol dict for client connections
409 client_info: zmq address/protocol dict for client connections
405 """
410 """
406
411
407 super(Hub, self).__init__(**kwargs)
412 super(Hub, self).__init__(**kwargs)
408 self.registration_timeout = max(10000, 5*self.heartmonitor.period)
413 self.registration_timeout = max(10000, 5*self.heartmonitor.period)
409
414
410 # register our callbacks
415 # register our callbacks
411 self.query.on_recv(self.dispatch_query)
416 self.query.on_recv(self.dispatch_query)
412 self.monitor.on_recv(self.dispatch_monitor_traffic)
417 self.monitor.on_recv(self.dispatch_monitor_traffic)
413
418
414 self.heartmonitor.add_heart_failure_handler(self.handle_heart_failure)
419 self.heartmonitor.add_heart_failure_handler(self.handle_heart_failure)
415 self.heartmonitor.add_new_heart_handler(self.handle_new_heart)
420 self.heartmonitor.add_new_heart_handler(self.handle_new_heart)
416
421
417 self.monitor_handlers = {b'in' : self.save_queue_request,
422 self.monitor_handlers = {b'in' : self.save_queue_request,
418 b'out': self.save_queue_result,
423 b'out': self.save_queue_result,
419 b'intask': self.save_task_request,
424 b'intask': self.save_task_request,
420 b'outtask': self.save_task_result,
425 b'outtask': self.save_task_result,
421 b'tracktask': self.save_task_destination,
426 b'tracktask': self.save_task_destination,
422 b'incontrol': _passer,
427 b'incontrol': _passer,
423 b'outcontrol': _passer,
428 b'outcontrol': _passer,
424 b'iopub': self.save_iopub_message,
429 b'iopub': self.save_iopub_message,
425 }
430 }
426
431
427 self.query_handlers = {'queue_request': self.queue_status,
432 self.query_handlers = {'queue_request': self.queue_status,
428 'result_request': self.get_results,
433 'result_request': self.get_results,
429 'history_request': self.get_history,
434 'history_request': self.get_history,
430 'db_request': self.db_query,
435 'db_request': self.db_query,
431 'purge_request': self.purge_results,
436 'purge_request': self.purge_results,
432 'load_request': self.check_load,
437 'load_request': self.check_load,
433 'resubmit_request': self.resubmit_task,
438 'resubmit_request': self.resubmit_task,
434 'shutdown_request': self.shutdown_request,
439 'shutdown_request': self.shutdown_request,
435 'registration_request' : self.register_engine,
440 'registration_request' : self.register_engine,
436 'unregistration_request' : self.unregister_engine,
441 'unregistration_request' : self.unregister_engine,
437 'connection_request': self.connection_request,
442 'connection_request': self.connection_request,
438 }
443 }
439
444
440 # ignore resubmit replies
445 # ignore resubmit replies
441 self.resubmit.on_recv(lambda msg: None, copy=False)
446 self.resubmit.on_recv(lambda msg: None, copy=False)
442
447
443 self.log.info("hub::created hub")
448 self.log.info("hub::created hub")
444
449
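Both handler tables registered above route on a single key, keeping dispatch flat and easy to extend; the pattern in isolation (illustrative names only):

    handlers = {'ping': lambda idents, msg: 'pong'}

    def dispatch(msg_type, idents, msg):
        handler = handlers.get(msg_type, None)
        if handler is None:
            raise KeyError("Bad Message Type: %r" % msg_type)
        return handler(idents, msg)

    assert dispatch('ping', [b'client-1'], {}) == 'pong'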
445 @property
450 @property
446 def _next_id(self):
451 def _next_id(self):
447 """gemerate a new ID.
452 """gemerate a new ID.
448
453
449 No longer reuse old ids, just count from 0."""
454 No longer reuse old ids, just count from 0."""
450 newid = self._idcounter
455 newid = self._idcounter
451 self._idcounter += 1
456 self._idcounter += 1
452 return newid
457 return newid
453 # newid = 0
458 # newid = 0
454 # incoming = [id[0] for id in self.incoming_registrations.itervalues()]
459 # incoming = [id[0] for id in self.incoming_registrations.itervalues()]
455 # # print newid, self.ids, self.incoming_registrations
460 # # print newid, self.ids, self.incoming_registrations
456 # while newid in self.ids or newid in incoming:
461 # while newid in self.ids or newid in incoming:
457 # newid += 1
462 # newid += 1
458 # return newid
463 # return newid
459
464
460 #-----------------------------------------------------------------------------
465 #-----------------------------------------------------------------------------
461 # message validation
466 # message validation
462 #-----------------------------------------------------------------------------
467 #-----------------------------------------------------------------------------
463
468
464 def _validate_targets(self, targets):
469 def _validate_targets(self, targets):
465 """turn any valid targets argument into a list of integer ids"""
470 """turn any valid targets argument into a list of integer ids"""
466 if targets is None:
471 if targets is None:
467 # default to all
472 # default to all
468 return self.ids
473 return self.ids
469
474
470 if isinstance(targets, (int,str,unicode)):
475 if isinstance(targets, (int,str,unicode)):
471 # only one target specified
476 # only one target specified
472 targets = [targets]
477 targets = [targets]
473 _targets = []
478 _targets = []
474 for t in targets:
479 for t in targets:
475 # map raw identities to ids
480 # map raw identities to ids
476 if isinstance(t, (str,unicode)):
481 if isinstance(t, (str,unicode)):
477 t = self.by_ident.get(cast_bytes(t), t)
482 t = self.by_ident.get(cast_bytes(t), t)
478 _targets.append(t)
483 _targets.append(t)
479 targets = _targets
484 targets = _targets
480 bad_targets = [ t for t in targets if t not in self.ids ]
485 bad_targets = [ t for t in targets if t not in self.ids ]
481 if bad_targets:
486 if bad_targets:
482 raise IndexError("No Such Engine: %r" % bad_targets)
487 raise IndexError("No Such Engine: %r" % bad_targets)
483 if not targets:
488 if not targets:
484 raise IndexError("No Engines Registered")
489 raise IndexError("No Engines Registered")
485 return targets
490 return targets
486
491
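A behaviour sketch of the normalization above, as a simplified stand-alone mirror (the ids and by_ident values are illustrative; the real method also handles unicode and byte-cast identities):

    def validate_targets_demo(targets, ids, by_ident):
        # simplified mirror of Hub._validate_targets, for illustration only
        if targets is None:
            return sorted(ids)                  # default to all engines
        if isinstance(targets, (int, str)):
            targets = [targets]                 # single target -> list
        targets = [by_ident.get(t.encode(), t) if isinstance(t, str) else t
                   for t in targets]            # map raw identities to ids
        bad = [t for t in targets if t not in ids]
        if bad:
            raise IndexError("No Such Engine: %r" % bad)
        if not targets:
            raise IndexError("No Engines Registered")
        return targets

    assert validate_targets_demo('abc', {0, 1}, {b'abc': 0}) == [0]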
487 #-----------------------------------------------------------------------------
492 #-----------------------------------------------------------------------------
488 # dispatch methods (1 per stream)
493 # dispatch methods (1 per stream)
489 #-----------------------------------------------------------------------------
494 #-----------------------------------------------------------------------------
490
495
491
496
492 @util.log_errors
497 @util.log_errors
493 def dispatch_monitor_traffic(self, msg):
498 def dispatch_monitor_traffic(self, msg):
494 """all ME and Task queue messages come through here, as well as
499 """all ME and Task queue messages come through here, as well as
495 IOPub traffic."""
500 IOPub traffic."""
496 self.log.debug("monitor traffic: %r", msg[0])
501 self.log.debug("monitor traffic: %r", msg[0])
497 switch = msg[0]
502 switch = msg[0]
498 try:
503 try:
499 idents, msg = self.session.feed_identities(msg[1:])
504 idents, msg = self.session.feed_identities(msg[1:])
500 except ValueError:
505 except ValueError:
501 idents=[]
506 idents=[]
502 if not idents:
507 if not idents:
503 self.log.error("Monitor message without topic: %r", msg)
508 self.log.error("Monitor message without topic: %r", msg)
504 return
509 return
505 handler = self.monitor_handlers.get(switch, None)
510 handler = self.monitor_handlers.get(switch, None)
506 if handler is not None:
511 if handler is not None:
507 handler(idents, msg)
512 handler(idents, msg)
508 else:
513 else:
509 self.log.error("Unrecognized monitor topic: %r", switch)
514 self.log.error("Unrecognized monitor topic: %r", switch)
510
515
511
516
512 @util.log_errors
517 @util.log_errors
513 def dispatch_query(self, msg):
518 def dispatch_query(self, msg):
514 """Route registration requests and queries from clients."""
519 """Route registration requests and queries from clients."""
515 try:
520 try:
516 idents, msg = self.session.feed_identities(msg)
521 idents, msg = self.session.feed_identities(msg)
517 except ValueError:
522 except ValueError:
518 idents = []
523 idents = []
519 if not idents:
524 if not idents:
520 self.log.error("Bad Query Message: %r", msg)
525 self.log.error("Bad Query Message: %r", msg)
521 return
526 return
522 client_id = idents[0]
527 client_id = idents[0]
523 try:
528 try:
524 msg = self.session.unserialize(msg, content=True)
529 msg = self.session.unserialize(msg, content=True)
525 except Exception:
530 except Exception:
526 content = error.wrap_exception()
531 content = error.wrap_exception()
527 self.log.error("Bad Query Message: %r", msg, exc_info=True)
532 self.log.error("Bad Query Message: %r", msg, exc_info=True)
528 self.session.send(self.query, "hub_error", ident=client_id,
533 self.session.send(self.query, "hub_error", ident=client_id,
529 content=content)
534 content=content)
530 return
535 return
531 # print client_id, header, parent, content
536 # print client_id, header, parent, content
532 #switch on message type:
537 #switch on message type:
533 msg_type = msg['header']['msg_type']
538 msg_type = msg['header']['msg_type']
534 self.log.info("client::client %r requested %r", client_id, msg_type)
539 self.log.info("client::client %r requested %r", client_id, msg_type)
535 handler = self.query_handlers.get(msg_type, None)
540 handler = self.query_handlers.get(msg_type, None)
536 try:
541 try:
537 assert handler is not None, "Bad Message Type: %r" % msg_type
542 assert handler is not None, "Bad Message Type: %r" % msg_type
538 except:
543 except:
539 content = error.wrap_exception()
544 content = error.wrap_exception()
540 self.log.error("Bad Message Type: %r", msg_type, exc_info=True)
545 self.log.error("Bad Message Type: %r", msg_type, exc_info=True)
541 self.session.send(self.query, "hub_error", ident=client_id,
546 self.session.send(self.query, "hub_error", ident=client_id,
542 content=content)
547 content=content)
543 return
548 return
544
549
545 else:
550 else:
546 handler(idents, msg)
551 handler(idents, msg)
547
552
548 def dispatch_db(self, msg):
553 def dispatch_db(self, msg):
549 """"""
554 """"""
550 raise NotImplementedError
555 raise NotImplementedError
551
556
552 #---------------------------------------------------------------------------
557 #---------------------------------------------------------------------------
553 # handler methods (1 per event)
558 # handler methods (1 per event)
554 #---------------------------------------------------------------------------
559 #---------------------------------------------------------------------------
555
560
556 #----------------------- Heartbeat --------------------------------------
561 #----------------------- Heartbeat --------------------------------------
557
562
558 def handle_new_heart(self, heart):
563 def handle_new_heart(self, heart):
559 """handler to attach to heartbeater.
564 """handler to attach to heartbeater.
560 Called when a new heart starts to beat.
565 Called when a new heart starts to beat.
561 Triggers completion of registration."""
566 Triggers completion of registration."""
562 self.log.debug("heartbeat::handle_new_heart(%r)", heart)
567 self.log.debug("heartbeat::handle_new_heart(%r)", heart)
563 if heart not in self.incoming_registrations:
568 if heart not in self.incoming_registrations:
564 self.log.info("heartbeat::ignoring new heart: %r", heart)
569 self.log.info("heartbeat::ignoring new heart: %r", heart)
565 else:
570 else:
566 self.finish_registration(heart)
571 self.finish_registration(heart)
567
572
568
573
569 def handle_heart_failure(self, heart):
574 def handle_heart_failure(self, heart):
570 """handler to attach to heartbeater.
575 """handler to attach to heartbeater.
571 called when a previously registered heart fails to respond to beat request.
576 called when a previously registered heart fails to respond to beat request.
572 triggers unregistration"""
577 triggers unregistration"""
573 self.log.debug("heartbeat::handle_heart_failure(%r)", heart)
578 self.log.debug("heartbeat::handle_heart_failure(%r)", heart)
574 eid = self.hearts.get(heart, None)
579 eid = self.hearts.get(heart, None)
575 if eid is None or self.keytable[eid] in self.dead_engines:
580 if eid is None or self.keytable[eid] in self.dead_engines:
576 self.log.info("heartbeat::ignoring heart failure %r (not an engine or already dead)", heart)
581 self.log.info("heartbeat::ignoring heart failure %r (not an engine or already dead)", heart)
577 else:
582 else:
578 uuid = self.engines[eid].uuid
583 uuid = self.engines[eid].uuid
579 self.unregister_engine(heart, dict(content=dict(id=eid, queue=uuid)))
584 self.unregister_engine(heart, dict(content=dict(id=eid, queue=uuid)))
580
585
581 #----------------------- MUX Queue Traffic ------------------------------
586 #----------------------- MUX Queue Traffic ------------------------------
582
587
583 def save_queue_request(self, idents, msg):
588 def save_queue_request(self, idents, msg):
584 if len(idents) < 2:
589 if len(idents) < 2:
585 self.log.error("invalid identity prefix: %r", idents)
590 self.log.error("invalid identity prefix: %r", idents)
586 return
591 return
587 queue_id, client_id = idents[:2]
592 queue_id, client_id = idents[:2]
588 try:
593 try:
589 msg = self.session.unserialize(msg)
594 msg = self.session.unserialize(msg)
590 except Exception:
595 except Exception:
591 self.log.error("queue::client %r sent invalid message to %r: %r", client_id, queue_id, msg, exc_info=True)
596 self.log.error("queue::client %r sent invalid message to %r: %r", client_id, queue_id, msg, exc_info=True)
592 return
597 return
593
598
594 eid = self.by_ident.get(queue_id, None)
599 eid = self.by_ident.get(queue_id, None)
595 if eid is None:
600 if eid is None:
596 self.log.error("queue::target %r not registered", queue_id)
601 self.log.error("queue::target %r not registered", queue_id)
597 self.log.debug("queue:: valid are: %r", self.by_ident.keys())
602 self.log.debug("queue:: valid are: %r", self.by_ident.keys())
598 return
603 return
599 record = init_record(msg)
604 record = init_record(msg)
600 msg_id = record['msg_id']
605 msg_id = record['msg_id']
601 self.log.info("queue::client %r submitted request %r to %s", client_id, msg_id, eid)
606 self.log.info("queue::client %r submitted request %r to %s", client_id, msg_id, eid)
602 # Unicode in records
607 # Unicode in records
603 record['engine_uuid'] = queue_id.decode('ascii')
608 record['engine_uuid'] = queue_id.decode('ascii')
604 record['client_uuid'] = msg['header']['session']
609 record['client_uuid'] = msg['header']['session']
605 record['queue'] = 'mux'
610 record['queue'] = 'mux'
606
611
607 try:
612 try:
608 # it's possible iopub arrived first:
613 # it's possible iopub arrived first:
609 existing = self.db.get_record(msg_id)
614 existing = self.db.get_record(msg_id)
610 for key,evalue in existing.iteritems():
615 for key,evalue in existing.iteritems():
611 rvalue = record.get(key, None)
616 rvalue = record.get(key, None)
612 if evalue and rvalue and evalue != rvalue:
617 if evalue and rvalue and evalue != rvalue:
613 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
618 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
614 elif evalue and not rvalue:
619 elif evalue and not rvalue:
615 record[key] = evalue
620 record[key] = evalue
616 try:
621 try:
617 self.db.update_record(msg_id, record)
622 self.db.update_record(msg_id, record)
618 except Exception:
623 except Exception:
619 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
624 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
620 except KeyError:
625 except KeyError:
621 try:
626 try:
622 self.db.add_record(msg_id, record)
627 self.db.add_record(msg_id, record)
623 except Exception:
628 except Exception:
624 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
629 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
625
630
626
631
627 self.pending.add(msg_id)
632 self.pending.add(msg_id)
628 self.queues[eid].append(msg_id)
633 self.queues[eid].append(msg_id)
629
634
630 def save_queue_result(self, idents, msg):
635 def save_queue_result(self, idents, msg):
631 if len(idents) < 2:
636 if len(idents) < 2:
632 self.log.error("invalid identity prefix: %r", idents)
637 self.log.error("invalid identity prefix: %r", idents)
633 return
638 return
634
639
635 client_id, queue_id = idents[:2]
640 client_id, queue_id = idents[:2]
636 try:
641 try:
637 msg = self.session.unserialize(msg)
642 msg = self.session.unserialize(msg)
638 except Exception:
643 except Exception:
639 self.log.error("queue::engine %r sent invalid message to %r: %r",
644 self.log.error("queue::engine %r sent invalid message to %r: %r",
640 queue_id, client_id, msg, exc_info=True)
645 queue_id, client_id, msg, exc_info=True)
641 return
646 return
642
647
643 eid = self.by_ident.get(queue_id, None)
648 eid = self.by_ident.get(queue_id, None)
644 if eid is None:
649 if eid is None:
645 self.log.error("queue::unknown engine %r is sending a reply: ", queue_id)
650 self.log.error("queue::unknown engine %r is sending a reply: ", queue_id)
646 return
651 return
647
652
648 parent = msg['parent_header']
653 parent = msg['parent_header']
649 if not parent:
654 if not parent:
650 return
655 return
651 msg_id = parent['msg_id']
656 msg_id = parent['msg_id']
652 if msg_id in self.pending:
657 if msg_id in self.pending:
653 self.pending.remove(msg_id)
658 self.pending.remove(msg_id)
654 self.all_completed.add(msg_id)
659 self.all_completed.add(msg_id)
655 self.queues[eid].remove(msg_id)
660 self.queues[eid].remove(msg_id)
656 self.completed[eid].append(msg_id)
661 self.completed[eid].append(msg_id)
657 self.log.info("queue::request %r completed on %s", msg_id, eid)
662 self.log.info("queue::request %r completed on %s", msg_id, eid)
658 elif msg_id not in self.all_completed:
663 elif msg_id not in self.all_completed:
659 # it could be a result from a dead engine that died before delivering the
664 # it could be a result from a dead engine that died before delivering the
660 # result
665 # result
661 self.log.warn("queue:: unknown msg finished %r", msg_id)
666 self.log.warn("queue:: unknown msg finished %r", msg_id)
662 return
667 return
663 # update record anyway, because the unregistration could have been premature
668 # update record anyway, because the unregistration could have been premature
664 rheader = msg['header']
669 rheader = msg['header']
665 md = msg['metadata']
670 md = msg['metadata']
666 completed = rheader['date']
671 completed = rheader['date']
667 started = md.get('started', None)
672 started = md.get('started', None)
668 result = {
673 result = {
669 'result_header' : rheader,
674 'result_header' : rheader,
670 'result_metadata': md,
675 'result_metadata': md,
671 'result_content': msg['content'],
676 'result_content': msg['content'],
672 'received': datetime.now(),
677 'received': datetime.now(),
673 'started' : started,
678 'started' : started,
674 'completed' : completed
679 'completed' : completed
675 }
680 }
676
681
677 result['result_buffers'] = msg['buffers']
682 result['result_buffers'] = msg['buffers']
678 try:
683 try:
679 self.db.update_record(msg_id, result)
684 self.db.update_record(msg_id, result)
680 except Exception:
685 except Exception:
681 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
686 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
682
687
683
688
684 #--------------------- Task Queue Traffic ------------------------------
689 #--------------------- Task Queue Traffic ------------------------------
685
690
686 def save_task_request(self, idents, msg):
691 def save_task_request(self, idents, msg):
687 """Save the submission of a task."""
692 """Save the submission of a task."""
688 client_id = idents[0]
693 client_id = idents[0]
689
694
690 try:
695 try:
691 msg = self.session.unserialize(msg)
696 msg = self.session.unserialize(msg)
692 except Exception:
697 except Exception:
693 self.log.error("task::client %r sent invalid task message: %r",
698 self.log.error("task::client %r sent invalid task message: %r",
694 client_id, msg, exc_info=True)
699 client_id, msg, exc_info=True)
695 return
700 return
696 record = init_record(msg)
701 record = init_record(msg)
697
702
698 record['client_uuid'] = msg['header']['session']
703 record['client_uuid'] = msg['header']['session']
699 record['queue'] = 'task'
704 record['queue'] = 'task'
700 header = msg['header']
705 header = msg['header']
701 msg_id = header['msg_id']
706 msg_id = header['msg_id']
702 self.pending.add(msg_id)
707 self.pending.add(msg_id)
703 self.unassigned.add(msg_id)
708 self.unassigned.add(msg_id)
704 try:
709 try:
705 # it's possible iopub arrived first:
710 # it's possible iopub arrived first:
706 existing = self.db.get_record(msg_id)
711 existing = self.db.get_record(msg_id)
707 if existing['resubmitted']:
712 if existing['resubmitted']:
708 for key in ('submitted', 'client_uuid', 'buffers'):
713 for key in ('submitted', 'client_uuid', 'buffers'):
709 # don't clobber these keys on resubmit
714 # don't clobber these keys on resubmit
710 # submitted and client_uuid should be different
715 # submitted and client_uuid should be different
711 # and buffers might be big, and shouldn't have changed
716 # and buffers might be big, and shouldn't have changed
712 record.pop(key)
717 record.pop(key)
713 # still check content and header, which should not change
718 # still check content and header, which should not change
714 # but are not as expensive to compare as buffers
719 # but are not as expensive to compare as buffers
715
720
716 for key,evalue in existing.iteritems():
721 for key,evalue in existing.iteritems():
717 if key.endswith('buffers'):
722 if key.endswith('buffers'):
718 # don't compare buffers
723 # don't compare buffers
719 continue
724 continue
720 rvalue = record.get(key, None)
725 rvalue = record.get(key, None)
721 if evalue and rvalue and evalue != rvalue:
726 if evalue and rvalue and evalue != rvalue:
722 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
727 self.log.warn("conflicting initial state for record: %r:%r <%r> %r", msg_id, rvalue, key, evalue)
723 elif evalue and not rvalue:
728 elif evalue and not rvalue:
724 record[key] = evalue
729 record[key] = evalue
725 try:
730 try:
726 self.db.update_record(msg_id, record)
731 self.db.update_record(msg_id, record)
727 except Exception:
732 except Exception:
728 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
733 self.log.error("DB Error updating record %r", msg_id, exc_info=True)
729 except KeyError:
734 except KeyError:
730 try:
735 try:
731 self.db.add_record(msg_id, record)
736 self.db.add_record(msg_id, record)
732 except Exception:
737 except Exception:
733 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
738 self.log.error("DB Error adding record %r", msg_id, exc_info=True)
734 except Exception:
739 except Exception:
735 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
740 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
736
741
737 def save_task_result(self, idents, msg):
742 def save_task_result(self, idents, msg):
738 """save the result of a completed task."""
743 """save the result of a completed task."""
739 client_id = idents[0]
744 client_id = idents[0]
740 try:
745 try:
741 msg = self.session.unserialize(msg)
746 msg = self.session.unserialize(msg)
742 except Exception:
747 except Exception:
743 self.log.error("task::invalid task result message send to %r: %r",
748 self.log.error("task::invalid task result message send to %r: %r",
744 client_id, msg, exc_info=True)
749 client_id, msg, exc_info=True)
745 return
750 return
746
751
747 parent = msg['parent_header']
752 parent = msg['parent_header']
748 if not parent:
753 if not parent:
749 # print msg
754 # print msg
750 self.log.warn("Task %r had no parent!", msg)
755 self.log.warn("Task %r had no parent!", msg)
751 return
756 return
752 msg_id = parent['msg_id']
757 msg_id = parent['msg_id']
753 if msg_id in self.unassigned:
758 if msg_id in self.unassigned:
754 self.unassigned.remove(msg_id)
759 self.unassigned.remove(msg_id)
755
760
756 header = msg['header']
761 header = msg['header']
757 md = msg['metadata']
762 md = msg['metadata']
758 engine_uuid = md.get('engine', u'')
763 engine_uuid = md.get('engine', u'')
759 eid = self.by_ident.get(cast_bytes(engine_uuid), None)
764 eid = self.by_ident.get(cast_bytes(engine_uuid), None)
760
765
761 status = md.get('status', None)
766 status = md.get('status', None)
762
767
763 if msg_id in self.pending:
768 if msg_id in self.pending:
764 self.log.info("task::task %r finished on %s", msg_id, eid)
769 self.log.info("task::task %r finished on %s", msg_id, eid)
765 self.pending.remove(msg_id)
770 self.pending.remove(msg_id)
766 self.all_completed.add(msg_id)
771 self.all_completed.add(msg_id)
767 if eid is not None:
772 if eid is not None:
768 if status != 'aborted':
773 if status != 'aborted':
769 self.completed[eid].append(msg_id)
774 self.completed[eid].append(msg_id)
770 if msg_id in self.tasks[eid]:
775 if msg_id in self.tasks[eid]:
771 self.tasks[eid].remove(msg_id)
776 self.tasks[eid].remove(msg_id)
772 completed = header['date']
777 completed = header['date']
773 started = md.get('started', None)
778 started = md.get('started', None)
774 result = {
779 result = {
775 'result_header' : header,
780 'result_header' : header,
776 'result_metadata': msg['metadata'],
781 'result_metadata': msg['metadata'],
777 'result_content': msg['content'],
782 'result_content': msg['content'],
778 'started' : started,
783 'started' : started,
779 'completed' : completed,
784 'completed' : completed,
780 'received' : datetime.now(),
785 'received' : datetime.now(),
781 'engine_uuid': engine_uuid,
786 'engine_uuid': engine_uuid,
782 }
787 }
783
788
784 result['result_buffers'] = msg['buffers']
789 result['result_buffers'] = msg['buffers']
785 try:
790 try:
786 self.db.update_record(msg_id, result)
791 self.db.update_record(msg_id, result)
787 except Exception:
792 except Exception:
788 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
793 self.log.error("DB Error saving task request %r", msg_id, exc_info=True)
789
794
790 else:
795 else:
791 self.log.debug("task::unknown task %r finished", msg_id)
796 self.log.debug("task::unknown task %r finished", msg_id)
792
797
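# For reference, a completed task's DB record gains roughly the following
# fields (names taken from the update above; values purely illustrative):
#   {'result_header': {...}, 'result_metadata': {...}, 'result_content': {...},
#    'started': datetime(...), 'completed': datetime(...),
#    'received': datetime(...), 'engine_uuid': u'...', 'result_buffers': [...]}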
793 def save_task_destination(self, idents, msg):
798 def save_task_destination(self, idents, msg):
794 try:
799 try:
795 msg = self.session.unserialize(msg, content=True)
800 msg = self.session.unserialize(msg, content=True)
796 except Exception:
801 except Exception:
797 self.log.error("task::invalid task tracking message", exc_info=True)
802 self.log.error("task::invalid task tracking message", exc_info=True)
798 return
803 return
799 content = msg['content']
804 content = msg['content']
800 # print (content)
805 # print (content)
801 msg_id = content['msg_id']
806 msg_id = content['msg_id']
802 engine_uuid = content['engine_id']
807 engine_uuid = content['engine_id']
803 eid = self.by_ident[cast_bytes(engine_uuid)]
808 eid = self.by_ident[cast_bytes(engine_uuid)]
804
809
805 self.log.info("task::task %r arrived on %r", msg_id, eid)
810 self.log.info("task::task %r arrived on %r", msg_id, eid)
806 if msg_id in self.unassigned:
811 if msg_id in self.unassigned:
807 self.unassigned.remove(msg_id)
812 self.unassigned.remove(msg_id)
808 # else:
813 # else:
809 # self.log.debug("task::task %r not listed as MIA?!"%(msg_id))
814 # self.log.debug("task::task %r not listed as MIA?!"%(msg_id))
810
815
811 self.tasks[eid].append(msg_id)
816 self.tasks[eid].append(msg_id)
812 # self.pending[msg_id][1].update(received=datetime.now(),engine=(eid,engine_uuid))
817 # self.pending[msg_id][1].update(received=datetime.now(),engine=(eid,engine_uuid))
813 try:
818 try:
814 self.db.update_record(msg_id, dict(engine_uuid=engine_uuid))
819 self.db.update_record(msg_id, dict(engine_uuid=engine_uuid))
815 except Exception:
820 except Exception:
816 self.log.error("DB Error saving task destination %r", msg_id, exc_info=True)
821 self.log.error("DB Error saving task destination %r", msg_id, exc_info=True)
817
822
818
823
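# Destination tracking only records which engine a task landed on
# (engine_uuid); the outcome itself arrives later via save_task_result.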
819 def mia_task_request(self, idents, msg):
824 def mia_task_request(self, idents, msg):
820 raise NotImplementedError
825 raise NotImplementedError
821 client_id = idents[0]
826 client_id = idents[0]
822 # content = dict(mia=self.mia,status='ok')
827 # content = dict(mia=self.mia,status='ok')
823 # self.session.send('mia_reply', content=content, idents=client_id)
828 # self.session.send('mia_reply', content=content, idents=client_id)
824
829
825
830
826 #--------------------- IOPub Traffic ------------------------------
831 #--------------------- IOPub Traffic ------------------------------
827
832
828 def save_iopub_message(self, topics, msg):
833 def save_iopub_message(self, topics, msg):
829 """save an iopub message into the db"""
834 """save an iopub message into the db"""
830 # print (topics)
835 # print (topics)
831 try:
836 try:
832 msg = self.session.unserialize(msg, content=True)
837 msg = self.session.unserialize(msg, content=True)
833 except Exception:
838 except Exception:
834 self.log.error("iopub::invalid IOPub message", exc_info=True)
839 self.log.error("iopub::invalid IOPub message", exc_info=True)
835 return
840 return
836
841
837 parent = msg['parent_header']
842 parent = msg['parent_header']
838 if not parent:
843 if not parent:
839 self.log.warn("iopub::IOPub message lacks parent: %r", msg)
844 self.log.warn("iopub::IOPub message lacks parent: %r", msg)
840 return
845 return
841 msg_id = parent['msg_id']
846 msg_id = parent['msg_id']
842 msg_type = msg['header']['msg_type']
847 msg_type = msg['header']['msg_type']
843 content = msg['content']
848 content = msg['content']
844
849
845 # ensure msg_id is in db
850 # ensure msg_id is in db
846 try:
851 try:
847 rec = self.db.get_record(msg_id)
852 rec = self.db.get_record(msg_id)
848 except KeyError:
853 except KeyError:
849 rec = empty_record()
854 rec = empty_record()
850 rec['msg_id'] = msg_id
855 rec['msg_id'] = msg_id
851 self.db.add_record(msg_id, rec)
856 self.db.add_record(msg_id, rec)
852 # stream
857 # stream
853 d = {}
858 d = {}
854 if msg_type == 'stream':
859 if msg_type == 'stream':
855 name = content['name']
860 name = content['name']
856 s = rec[name] or ''
861 s = rec[name] or ''
857 d[name] = s + content['data']
862 d[name] = s + content['data']
858
863
859 elif msg_type == 'pyerr':
864 elif msg_type == 'pyerr':
860 d['pyerr'] = content
865 d['pyerr'] = content
861 elif msg_type == 'pyin':
866 elif msg_type == 'pyin':
862 d['pyin'] = content['code']
867 d['pyin'] = content['code']
863 elif msg_type in ('display_data', 'pyout'):
868 elif msg_type in ('display_data', 'pyout'):
864 d[msg_type] = content
869 d[msg_type] = content
865 elif msg_type == 'status':
870 elif msg_type == 'status':
866 pass
871 pass
867 elif msg_type == 'data_pub':
872 elif msg_type == 'data_pub':
868 self.log.info("ignored data_pub message for %s", msg_id)
873 self.log.info("ignored data_pub message for %s", msg_id)
869 else:
874 else:
870 self.log.warn("unhandled iopub msg_type: %r", msg_type)
875 self.log.warn("unhandled iopub msg_type: %r", msg_type)
871
876
872 if not d:
877 if not d:
873 return
878 return
874
879
875 try:
880 try:
876 self.db.update_record(msg_id, d)
881 self.db.update_record(msg_id, d)
877 except Exception:
882 except Exception:
878 self.log.error("DB Error saving iopub message %r", msg_id, exc_info=True)
883 self.log.error("DB Error saving iopub message %r", msg_id, exc_info=True)
879
884
880
885
881
886
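# Summary of the iopub dispatch above: 'stream' output is appended to the
# record's stdout/stderr text, pyin/pyerr/pyout/display_data are stored under
# the matching key, and status/data_pub messages are dropped.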
882 #-------------------------------------------------------------------------
887 #-------------------------------------------------------------------------
883 # Registration requests
888 # Registration requests
884 #-------------------------------------------------------------------------
889 #-------------------------------------------------------------------------
885
890
886 def connection_request(self, client_id, msg):
891 def connection_request(self, client_id, msg):
887 """Reply with connection addresses for clients."""
892 """Reply with connection addresses for clients."""
888 self.log.info("client::client %r connected", client_id)
893 self.log.info("client::client %r connected", client_id)
889 content = dict(status='ok')
894 content = dict(status='ok')
890 jsonable = {}
895 jsonable = {}
891 for k,v in self.keytable.iteritems():
896 for k,v in self.keytable.iteritems():
892 if v not in self.dead_engines:
897 if v not in self.dead_engines:
893 jsonable[str(k)] = v
898 jsonable[str(k)] = v
894 content['engines'] = jsonable
899 content['engines'] = jsonable
895 self.session.send(self.query, 'connection_reply', content, parent=msg, ident=client_id)
900 self.session.send(self.query, 'connection_reply', content, parent=msg, ident=client_id)
896
901
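# An illustrative connection_reply content (engine ids and uuids invented):
#   {'status': 'ok', 'engines': {'0': u'5f36d1f4-...', '1': u'a2c77d90-...'}}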
897 def register_engine(self, reg, msg):
902 def register_engine(self, reg, msg):
898 """Register a new engine."""
903 """Register a new engine."""
899 content = msg['content']
904 content = msg['content']
900 try:
905 try:
901 uuid = content['uuid']
906 uuid = content['uuid']
902 except KeyError:
907 except KeyError:
903 self.log.error("registration::uuid not specified", exc_info=True)
908 self.log.error("registration::uuid not specified", exc_info=True)
904 return
909 return
905
910
906 eid = self._next_id
911 eid = self._next_id
907
912
908 self.log.debug("registration::register_engine(%i, %r)", eid, uuid)
913 self.log.debug("registration::register_engine(%i, %r)", eid, uuid)
909
914
910 content = dict(id=eid,status='ok',hb_period=self.heartmonitor.period)
915 content = dict(id=eid,status='ok',hb_period=self.heartmonitor.period)
911 # check if requesting available IDs:
916 # check if requesting available IDs:
912 if cast_bytes(uuid) in self.by_ident:
917 if cast_bytes(uuid) in self.by_ident:
913 try:
918 try:
914 raise KeyError("uuid %r in use" % uuid)
919 raise KeyError("uuid %r in use" % uuid)
915 except:
920 except:
916 content = error.wrap_exception()
921 content = error.wrap_exception()
917 self.log.error("uuid %r in use", uuid, exc_info=True)
922 self.log.error("uuid %r in use", uuid, exc_info=True)
918 else:
923 else:
919 for h, ec in self.incoming_registrations.iteritems():
924 for h, ec in self.incoming_registrations.iteritems():
920 if uuid == h:
925 if uuid == h:
921 try:
926 try:
922 raise KeyError("heart_id %r in use" % uuid)
927 raise KeyError("heart_id %r in use" % uuid)
923 except:
928 except:
924 self.log.error("heart_id %r in use", uuid, exc_info=True)
929 self.log.error("heart_id %r in use", uuid, exc_info=True)
925 content = error.wrap_exception()
930 content = error.wrap_exception()
926 break
931 break
927 elif uuid == ec.uuid:
932 elif uuid == ec.uuid:
928 try:
933 try:
929 raise KeyError("uuid %r in use" % uuid)
934 raise KeyError("uuid %r in use" % uuid)
930 except:
935 except:
931 self.log.error("uuid %r in use", uuid, exc_info=True)
936 self.log.error("uuid %r in use", uuid, exc_info=True)
932 content = error.wrap_exception()
937 content = error.wrap_exception()
933 break
938 break
934
939
935 msg = self.session.send(self.query, "registration_reply",
940 msg = self.session.send(self.query, "registration_reply",
936 content=content,
941 content=content,
937 ident=reg)
942 ident=reg)
938
943
939 heart = cast_bytes(uuid)
944 heart = cast_bytes(uuid)
940
945
941 if content['status'] == 'ok':
946 if content['status'] == 'ok':
942 if heart in self.heartmonitor.hearts:
947 if heart in self.heartmonitor.hearts:
943 # already beating
948 # already beating
944 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid)
949 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid)
945 self.finish_registration(heart)
950 self.finish_registration(heart)
946 else:
951 else:
947 purge = lambda : self._purge_stalled_registration(heart)
952 purge = lambda : self._purge_stalled_registration(heart)
948 dc = ioloop.DelayedCallback(purge, self.registration_timeout, self.loop)
953 dc = ioloop.DelayedCallback(purge, self.registration_timeout, self.loop)
949 dc.start()
954 dc.start()
950 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid,stallback=dc)
955 self.incoming_registrations[heart] = EngineConnector(id=eid,uuid=uuid,stallback=dc)
951 else:
956 else:
952 self.log.error("registration::registration %i failed: %r", eid, content['evalue'])
957 self.log.error("registration::registration %i failed: %r", eid, content['evalue'])
953
958
954 return eid
959 return eid
955
960
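# On success the registration_reply content built above is
#   {'id': <eid>, 'status': 'ok', 'hb_period': <heartbeat period in ms>}
# while a uuid/heart_id collision replaces it with error.wrap_exception().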
956 def unregister_engine(self, ident, msg):
961 def unregister_engine(self, ident, msg):
957 """Unregister an engine that explicitly requested to leave."""
962 """Unregister an engine that explicitly requested to leave."""
958 try:
963 try:
959 eid = msg['content']['id']
964 eid = msg['content']['id']
960 except:
965 except:
961 self.log.error("registration::bad engine id for unregistration: %r", ident, exc_info=True)
966 self.log.error("registration::bad engine id for unregistration: %r", ident, exc_info=True)
962 return
967 return
963 self.log.info("registration::unregister_engine(%r)", eid)
968 self.log.info("registration::unregister_engine(%r)", eid)
964 # print (eid)
969 # print (eid)
965 uuid = self.keytable[eid]
970 uuid = self.keytable[eid]
966 content=dict(id=eid, uuid=uuid)
971 content=dict(id=eid, uuid=uuid)
967 self.dead_engines.add(uuid)
972 self.dead_engines.add(uuid)
968 # self.ids.remove(eid)
973 # self.ids.remove(eid)
969 # uuid = self.keytable.pop(eid)
974 # uuid = self.keytable.pop(eid)
970 #
975 #
971 # ec = self.engines.pop(eid)
976 # ec = self.engines.pop(eid)
972 # self.hearts.pop(ec.heartbeat)
977 # self.hearts.pop(ec.heartbeat)
973 # self.by_ident.pop(ec.queue)
978 # self.by_ident.pop(ec.queue)
974 # self.completed.pop(eid)
979 # self.completed.pop(eid)
975 handleit = lambda : self._handle_stranded_msgs(eid, uuid)
980 handleit = lambda : self._handle_stranded_msgs(eid, uuid)
976 dc = ioloop.DelayedCallback(handleit, self.registration_timeout, self.loop)
981 dc = ioloop.DelayedCallback(handleit, self.registration_timeout, self.loop)
977 dc.start()
982 dc.start()
978 ############## TODO: HANDLE IT ################
983 ############## TODO: HANDLE IT ################
979
984
980 self._save_engine_state()
985 self._save_engine_state()
981
986
982 if self.notifier:
987 if self.notifier:
983 self.session.send(self.notifier, "unregistration_notification", content=content)
988 self.session.send(self.notifier, "unregistration_notification", content=content)
984
989
985 def _handle_stranded_msgs(self, eid, uuid):
990 def _handle_stranded_msgs(self, eid, uuid):
986 """Handle messages known to be on an engine when the engine unregisters.
991 """Handle messages known to be on an engine when the engine unregisters.
987
992
988 It is possible that this will fire prematurely - that is, an engine will
993 It is possible that this will fire prematurely - that is, an engine will
989 go down after completing a result, and the client will be notified
994 go down after completing a result, and the client will be notified
990 that the result failed and later receive the actual result.
995 that the result failed and later receive the actual result.
991 """
996 """
992
997
993 outstanding = self.queues[eid]
998 outstanding = self.queues[eid]
994
999
995 for msg_id in outstanding:
1000 for msg_id in outstanding:
996 self.pending.remove(msg_id)
1001 self.pending.remove(msg_id)
997 self.all_completed.add(msg_id)
1002 self.all_completed.add(msg_id)
998 try:
1003 try:
999 raise error.EngineError("Engine %r died while running task %r" % (eid, msg_id))
1004 raise error.EngineError("Engine %r died while running task %r" % (eid, msg_id))
1000 except:
1005 except:
1001 content = error.wrap_exception()
1006 content = error.wrap_exception()
1002 # build a fake header:
1007 # build a fake header:
1003 header = {}
1008 header = {}
1004 header['engine'] = uuid
1009 header['engine'] = uuid
1005 header['date'] = datetime.now()
1010 header['date'] = datetime.now()
1006 rec = dict(result_content=content, result_header=header, result_buffers=[])
1011 rec = dict(result_content=content, result_header=header, result_buffers=[])
1007 rec['completed'] = header['date']
1012 rec['completed'] = header['date']
1008 rec['engine_uuid'] = uuid
1013 rec['engine_uuid'] = uuid
1009 try:
1014 try:
1010 self.db.update_record(msg_id, rec)
1015 self.db.update_record(msg_id, rec)
1011 except Exception:
1016 except Exception:
1012 self.log.error("DB Error handling stranded msg %r", msg_id, exc_info=True)
1017 self.log.error("DB Error handling stranded msg %r", msg_id, exc_info=True)
1013
1018
1014
1019
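# The synthetic failure record built above mirrors a real result closely
# enough for clients: result_content is a wrapped EngineError, the buffer
# list is empty, and completed/engine_uuid come from the dead engine.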
1015 def finish_registration(self, heart):
1020 def finish_registration(self, heart):
1016 """Second half of engine registration, called after our HeartMonitor
1021 """Second half of engine registration, called after our HeartMonitor
1017 has received a beat from the Engine's Heart."""
1022 has received a beat from the Engine's Heart."""
1018 try:
1023 try:
1019 ec = self.incoming_registrations.pop(heart)
1024 ec = self.incoming_registrations.pop(heart)
1020 except KeyError:
1025 except KeyError:
1021 self.log.error("registration::tried to finish nonexistent registration", exc_info=True)
1026 self.log.error("registration::tried to finish nonexistent registration", exc_info=True)
1022 return
1027 return
1023 self.log.info("registration::finished registering engine %i:%s", ec.id, ec.uuid)
1028 self.log.info("registration::finished registering engine %i:%s", ec.id, ec.uuid)
1024 if ec.stallback is not None:
1029 if ec.stallback is not None:
1025 ec.stallback.stop()
1030 ec.stallback.stop()
1026 eid = ec.id
1031 eid = ec.id
1027 self.ids.add(eid)
1032 self.ids.add(eid)
1028 self.keytable[eid] = ec.uuid
1033 self.keytable[eid] = ec.uuid
1029 self.engines[eid] = ec
1034 self.engines[eid] = ec
1030 self.by_ident[cast_bytes(ec.uuid)] = ec.id
1035 self.by_ident[cast_bytes(ec.uuid)] = ec.id
1031 self.queues[eid] = list()
1036 self.queues[eid] = list()
1032 self.tasks[eid] = list()
1037 self.tasks[eid] = list()
1033 self.completed[eid] = list()
1038 self.completed[eid] = list()
1034 self.hearts[heart] = eid
1039 self.hearts[heart] = eid
1035 content = dict(id=eid, uuid=self.engines[eid].uuid)
1040 content = dict(id=eid, uuid=self.engines[eid].uuid)
1036 if self.notifier:
1041 if self.notifier:
1037 self.session.send(self.notifier, "registration_notification", content=content)
1042 self.session.send(self.notifier, "registration_notification", content=content)
1038 self.log.info("engine::Engine Connected: %i", eid)
1043 self.log.info("engine::Engine Connected: %i", eid)
1039
1044
1040 self._save_engine_state()
1045 self._save_engine_state()
1041
1046
1042 def _purge_stalled_registration(self, heart):
1047 def _purge_stalled_registration(self, heart):
1043 if heart in self.incoming_registrations:
1048 if heart in self.incoming_registrations:
1044 ec = self.incoming_registrations.pop(heart)
1049 ec = self.incoming_registrations.pop(heart)
1045 self.log.info("registration::purging stalled registration: %i", ec.id)
1050 self.log.info("registration::purging stalled registration: %i", ec.id)
1046 else:
1051 else:
1047 pass
1052 pass
1048
1053
1049 #-------------------------------------------------------------------------
1054 #-------------------------------------------------------------------------
1050 # Engine State
1055 # Engine State
1051 #-------------------------------------------------------------------------
1056 #-------------------------------------------------------------------------
1052
1057
1053
1058
1054 def _cleanup_engine_state_file(self):
1059 def _cleanup_engine_state_file(self):
1055 """cleanup engine state mapping"""
1060 """cleanup engine state mapping"""
1056
1061
1057 if os.path.exists(self.engine_state_file):
1062 if os.path.exists(self.engine_state_file):
1058 self.log.debug("cleaning up engine state: %s", self.engine_state_file)
1063 self.log.debug("cleaning up engine state: %s", self.engine_state_file)
1059 try:
1064 try:
1060 os.remove(self.engine_state_file)
1065 os.remove(self.engine_state_file)
1061 except (IOError, OSError):
1066 except (IOError, OSError):
1062 self.log.error("Couldn't cleanup file: %s", self.engine_state_file, exc_info=True)
1067 self.log.error("Couldn't cleanup file: %s", self.engine_state_file, exc_info=True)
1063
1068
1064
1069
1065 def _save_engine_state(self):
1070 def _save_engine_state(self):
1066 """save engine mapping to JSON file"""
1071 """save engine mapping to JSON file"""
1067 if not self.engine_state_file:
1072 if not self.engine_state_file:
1068 return
1073 return
1069 self.log.debug("save engine state to %s", self.engine_state_file)
1074 self.log.debug("save engine state to %s", self.engine_state_file)
1070 state = {}
1075 state = {}
1071 engines = {}
1076 engines = {}
1072 for eid, ec in self.engines.iteritems():
1077 for eid, ec in self.engines.iteritems():
1073 if ec.uuid not in self.dead_engines:
1078 if ec.uuid not in self.dead_engines:
1074 engines[eid] = ec.uuid
1079 engines[eid] = ec.uuid
1075
1080
1076 state['engines'] = engines
1081 state['engines'] = engines
1077
1082
1078 state['next_id'] = self._idcounter
1083 state['next_id'] = self._idcounter
1079
1084
1080 with open(self.engine_state_file, 'w') as f:
1085 with open(self.engine_state_file, 'w') as f:
1081 json.dump(state, f)
1086 json.dump(state, f)
1082
1087
1083
1088
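# The engine-state file is plain JSON; a sketch of its layout (ids and
# uuids invented for illustration):
#   {"engines": {"0": "5f36d1f4-...", "1": "a2c77d90-..."}, "next_id": 2}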
1084 def _load_engine_state(self):
1089 def _load_engine_state(self):
1085 """load engine mapping from JSON file"""
1090 """load engine mapping from JSON file"""
1086 if not os.path.exists(self.engine_state_file):
1091 if not os.path.exists(self.engine_state_file):
1087 return
1092 return
1088
1093
1089 self.log.info("loading engine state from %s", self.engine_state_file)
1094 self.log.info("loading engine state from %s", self.engine_state_file)
1090
1095
1091 with open(self.engine_state_file) as f:
1096 with open(self.engine_state_file) as f:
1092 state = json.load(f)
1097 state = json.load(f)
1093
1098
1094 save_notifier = self.notifier
1099 save_notifier = self.notifier
1095 self.notifier = None
1100 self.notifier = None
1096 for eid, uuid in state['engines'].iteritems():
1101 for eid, uuid in state['engines'].iteritems():
1097 heart = uuid.encode('ascii')
1102 heart = uuid.encode('ascii')
1098 # start with this heart as current and beating:
1103 # start with this heart as current and beating:
1099 self.heartmonitor.responses.add(heart)
1104 self.heartmonitor.responses.add(heart)
1100 self.heartmonitor.hearts.add(heart)
1105 self.heartmonitor.hearts.add(heart)
1101
1106
1102 self.incoming_registrations[heart] = EngineConnector(id=int(eid), uuid=uuid)
1107 self.incoming_registrations[heart] = EngineConnector(id=int(eid), uuid=uuid)
1103 self.finish_registration(heart)
1108 self.finish_registration(heart)
1104
1109
1105 self.notifier = save_notifier
1110 self.notifier = save_notifier
1106
1111
1107 self._idcounter = state['next_id']
1112 self._idcounter = state['next_id']
1108
1113
1109 #-------------------------------------------------------------------------
1114 #-------------------------------------------------------------------------
1110 # Client Requests
1115 # Client Requests
1111 #-------------------------------------------------------------------------
1116 #-------------------------------------------------------------------------
1112
1117
1113 def shutdown_request(self, client_id, msg):
1118 def shutdown_request(self, client_id, msg):
1114 """handle shutdown request."""
1119 """handle shutdown request."""
1115 self.session.send(self.query, 'shutdown_reply', content={'status': 'ok'}, ident=client_id)
1120 self.session.send(self.query, 'shutdown_reply', content={'status': 'ok'}, ident=client_id)
1116 # also notify other clients of shutdown
1121 # also notify other clients of shutdown
1117 self.session.send(self.notifier, 'shutdown_notice', content={'status': 'ok'})
1122 self.session.send(self.notifier, 'shutdown_notice', content={'status': 'ok'})
1118 dc = ioloop.DelayedCallback(lambda : self._shutdown(), 1000, self.loop)
1123 dc = ioloop.DelayedCallback(lambda : self._shutdown(), 1000, self.loop)
1119 dc.start()
1124 dc.start()
1120
1125
1121 def _shutdown(self):
1126 def _shutdown(self):
1122 self.log.info("hub::hub shutting down.")
1127 self.log.info("hub::hub shutting down.")
1123 time.sleep(0.1)
1128 time.sleep(0.1)
1124 sys.exit(0)
1129 sys.exit(0)
1125
1130
1126
1131
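# The one-second DelayedCallback in shutdown_request above gives the
# shutdown_reply and shutdown_notice messages time to flush before
# _shutdown() exits the process.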
1127 def check_load(self, client_id, msg):
1132 def check_load(self, client_id, msg):
1128 content = msg['content']
1133 content = msg['content']
1129 try:
1134 try:
1130 targets = content['targets']
1135 targets = content['targets']
1131 targets = self._validate_targets(targets)
1136 targets = self._validate_targets(targets)
1132 except:
1137 except:
1133 content = error.wrap_exception()
1138 content = error.wrap_exception()
1134 self.session.send(self.query, "hub_error",
1139 self.session.send(self.query, "hub_error",
1135 content=content, ident=client_id)
1140 content=content, ident=client_id)
1136 return
1141 return
1137
1142
1138 content = dict(status='ok')
1143 content = dict(status='ok')
1139 # loads = {}
1144 # loads = {}
1140 for t in targets:
1145 for t in targets:
1141 content[bytes(t)] = len(self.queues[t])+len(self.tasks[t])
1146 content[bytes(t)] = len(self.queues[t])+len(self.tasks[t])
1142 self.session.send(self.query, "load_reply", content=content, ident=client_id)
1147 self.session.send(self.query, "load_reply", content=content, ident=client_id)
1143
1148
1144
1149
1145 def queue_status(self, client_id, msg):
1150 def queue_status(self, client_id, msg):
1146 """Return the Queue status of one or more targets.
1151 """Return the Queue status of one or more targets.
1147 if verbose: return the msg_ids
1152 if verbose: return the msg_ids
1148 else: return len of each type.
1153 else: return len of each type.
1149 keys: queue (pending MUX jobs)
1154 keys: queue (pending MUX jobs)
1150 tasks (pending Task jobs)
1155 tasks (pending Task jobs)
1151 completed (finished jobs from both queues)"""
1156 completed (finished jobs from both queues)"""
1152 content = msg['content']
1157 content = msg['content']
1153 targets = content['targets']
1158 targets = content['targets']
1154 try:
1159 try:
1155 targets = self._validate_targets(targets)
1160 targets = self._validate_targets(targets)
1156 except:
1161 except:
1157 content = error.wrap_exception()
1162 content = error.wrap_exception()
1158 self.session.send(self.query, "hub_error",
1163 self.session.send(self.query, "hub_error",
1159 content=content, ident=client_id)
1164 content=content, ident=client_id)
1160 return
1165 return
1161 verbose = content.get('verbose', False)
1166 verbose = content.get('verbose', False)
1162 content = dict(status='ok')
1167 content = dict(status='ok')
1163 for t in targets:
1168 for t in targets:
1164 queue = self.queues[t]
1169 queue = self.queues[t]
1165 completed = self.completed[t]
1170 completed = self.completed[t]
1166 tasks = self.tasks[t]
1171 tasks = self.tasks[t]
1167 if not verbose:
1172 if not verbose:
1168 queue = len(queue)
1173 queue = len(queue)
1169 completed = len(completed)
1174 completed = len(completed)
1170 tasks = len(tasks)
1175 tasks = len(tasks)
1171 content[str(t)] = {'queue': queue, 'completed': completed, 'tasks': tasks}
1176 content[str(t)] = {'queue': queue, 'completed': completed, 'tasks': tasks}
1172 content['unassigned'] = list(self.unassigned) if verbose else len(self.unassigned)
1177 content['unassigned'] = list(self.unassigned) if verbose else len(self.unassigned)
1173 # print (content)
1178 # print (content)
1174 self.session.send(self.query, "queue_reply", content=content, ident=client_id)
1179 self.session.send(self.query, "queue_reply", content=content, ident=client_id)
1175
1180
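# A sketch of a non-verbose queue_reply content (counts illustrative):
#   {'status': 'ok', 'unassigned': 0,
#    '0': {'queue': 2, 'completed': 10, 'tasks': 1}}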
1176 def purge_results(self, client_id, msg):
1181 def purge_results(self, client_id, msg):
1177 """Purge results from memory. This method is more valuable before we move
1182 """Purge results from memory. This method is more valuable before we move
1178 to a DB based message storage mechanism."""
1183 to a DB based message storage mechanism."""
1179 content = msg['content']
1184 content = msg['content']
1180 self.log.info("Dropping records with %s", content)
1185 self.log.info("Dropping records with %s", content)
1181 msg_ids = content.get('msg_ids', [])
1186 msg_ids = content.get('msg_ids', [])
1182 reply = dict(status='ok')
1187 reply = dict(status='ok')
1183 if msg_ids == 'all':
1188 if msg_ids == 'all':
1184 try:
1189 try:
1185 self.db.drop_matching_records(dict(completed={'$ne':None}))
1190 self.db.drop_matching_records(dict(completed={'$ne':None}))
1186 except Exception:
1191 except Exception:
1187 reply = error.wrap_exception()
1192 reply = error.wrap_exception()
1188 else:
1193 else:
1189 pending = filter(lambda m: m in self.pending, msg_ids)
1194 pending = filter(lambda m: m in self.pending, msg_ids)
1190 if pending:
1195 if pending:
1191 try:
1196 try:
1192 raise IndexError("msg pending: %r" % pending[0])
1197 raise IndexError("msg pending: %r" % pending[0])
1193 except:
1198 except:
1194 reply = error.wrap_exception()
1199 reply = error.wrap_exception()
1195 else:
1200 else:
1196 try:
1201 try:
1197 self.db.drop_matching_records(dict(msg_id={'$in':msg_ids}))
1202 self.db.drop_matching_records(dict(msg_id={'$in':msg_ids}))
1198 except Exception:
1203 except Exception:
1199 reply = error.wrap_exception()
1204 reply = error.wrap_exception()
1200
1205
1201 if reply['status'] == 'ok':
1206 if reply['status'] == 'ok':
1202 eids = content.get('engine_ids', [])
1207 eids = content.get('engine_ids', [])
1203 for eid in eids:
1208 for eid in eids:
1204 if eid not in self.engines:
1209 if eid not in self.engines:
1205 try:
1210 try:
1206 raise IndexError("No such engine: %i" % eid)
1211 raise IndexError("No such engine: %i" % eid)
1207 except:
1212 except:
1208 reply = error.wrap_exception()
1213 reply = error.wrap_exception()
1209 break
1214 break
1210 uid = self.engines[eid].uuid
1215 uid = self.engines[eid].uuid
1211 try:
1216 try:
1212 self.db.drop_matching_records(dict(engine_uuid=uid, completed={'$ne':None}))
1217 self.db.drop_matching_records(dict(engine_uuid=uid, completed={'$ne':None}))
1213 except Exception:
1218 except Exception:
1214 reply = error.wrap_exception()
1219 reply = error.wrap_exception()
1215 break
1220 break
1216
1221
1217 self.session.send(self.query, 'purge_reply', content=reply, ident=client_id)
1222 self.session.send(self.query, 'purge_reply', content=reply, ident=client_id)
1218
1223
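# purge_reply stays {'status': 'ok'} unless a check fails (a still-pending
# msg_id, an unknown engine id, or a DB error), in which case the first
# wrapped exception is returned and the remaining engines are skipped.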
1219 def resubmit_task(self, client_id, msg):
1224 def resubmit_task(self, client_id, msg):
1220 """Resubmit one or more tasks."""
1225 """Resubmit one or more tasks."""
1221 def finish(reply):
1226 def finish(reply):
1222 self.session.send(self.query, 'resubmit_reply', content=reply, ident=client_id)
1227 self.session.send(self.query, 'resubmit_reply', content=reply, ident=client_id)
1223
1228
1224 content = msg['content']
1229 content = msg['content']
1225 msg_ids = content['msg_ids']
1230 msg_ids = content['msg_ids']
1226 reply = dict(status='ok')
1231 reply = dict(status='ok')
1227 try:
1232 try:
1228 records = self.db.find_records({'msg_id' : {'$in' : msg_ids}}, keys=[
1233 records = self.db.find_records({'msg_id' : {'$in' : msg_ids}}, keys=[
1229 'header', 'content', 'buffers'])
1234 'header', 'content', 'buffers'])
1230 except Exception:
1235 except Exception:
1231 self.log.error('db::db error finding tasks to resubmit', exc_info=True)
1236 self.log.error('db::db error finding tasks to resubmit', exc_info=True)
1232 return finish(error.wrap_exception())
1237 return finish(error.wrap_exception())
1233
1238
1234 # validate msg_ids
1239 # validate msg_ids
1235 found_ids = [ rec['msg_id'] for rec in records ]
1240 found_ids = [ rec['msg_id'] for rec in records ]
1236 pending_ids = [ msg_id for msg_id in found_ids if msg_id in self.pending ]
1241 pending_ids = [ msg_id for msg_id in found_ids if msg_id in self.pending ]
1237 if len(records) > len(msg_ids):
1242 if len(records) > len(msg_ids):
1238 try:
1243 try:
1239 raise RuntimeError("DB appears to be in an inconsistent state. "
1244 raise RuntimeError("DB appears to be in an inconsistent state. "
1240 "More matching records were found than should exist")
1245 "More matching records were found than should exist")
1241 except Exception:
1246 except Exception:
1242 return finish(error.wrap_exception())
1247 return finish(error.wrap_exception())
1243 elif len(records) < len(msg_ids):
1248 elif len(records) < len(msg_ids):
1244 missing = [ m for m in msg_ids if m not in found_ids ]
1249 missing = [ m for m in msg_ids if m not in found_ids ]
1245 try:
1250 try:
1246 raise KeyError("No such msg(s): %r" % missing)
1251 raise KeyError("No such msg(s): %r" % missing)
1247 except KeyError:
1252 except KeyError:
1248 return finish(error.wrap_exception())
1253 return finish(error.wrap_exception())
1249 elif pending_ids:
1254 elif pending_ids:
1250 pass
1255 pass
1251 # no need to raise on resubmit of pending task, now that we
1256 # no need to raise on resubmit of pending task, now that we
1252 # resubmit under new ID, but do we want to raise anyway?
1257 # resubmit under new ID, but do we want to raise anyway?
1253 # msg_id = invalid_ids[0]
1258 # msg_id = invalid_ids[0]
1254 # try:
1259 # try:
1255 # raise ValueError("Task(s) %r appears to be inflight" % )
1260 # raise ValueError("Task(s) %r appears to be inflight" % )
1256 # except Exception:
1261 # except Exception:
1257 # return finish(error.wrap_exception())
1262 # return finish(error.wrap_exception())
1258
1263
1259 # mapping of original IDs to resubmitted IDs
1264 # mapping of original IDs to resubmitted IDs
1260 resubmitted = {}
1265 resubmitted = {}
1261
1266
1262 # send the messages
1267 # send the messages
1263 for rec in records:
1268 for rec in records:
1264 header = rec['header']
1269 header = rec['header']
1265 msg = self.session.msg(header['msg_type'], parent=header)
1270 msg = self.session.msg(header['msg_type'], parent=header)
1266 msg_id = msg['msg_id']
1271 msg_id = msg['msg_id']
1267 msg['content'] = rec['content']
1272 msg['content'] = rec['content']
1268
1273
1269 # use the old header, but update msg_id and timestamp
1274 # use the old header, but update msg_id and timestamp
1270 fresh = msg['header']
1275 fresh = msg['header']
1271 header['msg_id'] = fresh['msg_id']
1276 header['msg_id'] = fresh['msg_id']
1272 header['date'] = fresh['date']
1277 header['date'] = fresh['date']
1273 msg['header'] = header
1278 msg['header'] = header
1274
1279
1275 self.session.send(self.resubmit, msg, buffers=rec['buffers'])
1280 self.session.send(self.resubmit, msg, buffers=rec['buffers'])
1276
1281
1277 resubmitted[rec['msg_id']] = msg_id
1282 resubmitted[rec['msg_id']] = msg_id
1278 self.pending.add(msg_id)
1283 self.pending.add(msg_id)
1279 msg['buffers'] = rec['buffers']
1284 msg['buffers'] = rec['buffers']
1280 try:
1285 try:
1281 self.db.add_record(msg_id, init_record(msg))
1286 self.db.add_record(msg_id, init_record(msg))
1282 except Exception:
1287 except Exception:
1283 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
1288 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
1284 return finish(error.wrap_exception())
1289 return finish(error.wrap_exception())
1285
1290
1286 finish(dict(status='ok', resubmitted=resubmitted))
1291 finish(dict(status='ok', resubmitted=resubmitted))
1287
1292
1288 # store the new IDs in the Task DB
1293 # store the new IDs in the Task DB
1289 for msg_id, resubmit_id in resubmitted.iteritems():
1294 for msg_id, resubmit_id in resubmitted.iteritems():
1290 try:
1295 try:
1291 self.db.update_record(msg_id, {'resubmitted' : resubmit_id})
1296 self.db.update_record(msg_id, {'resubmitted' : resubmit_id})
1292 except Exception:
1297 except Exception:
1293 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
1298 self.log.error("db::DB Error updating record: %s", msg_id, exc_info=True)
1294
1299
1295
1300
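# resubmit_reply content therefore has the form
#   {'status': 'ok', 'resubmitted': {'<old_msg_id>': '<new_msg_id>', ...}}
# and each original record gains a 'resubmitted' field holding the new id.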
1296 def _extract_record(self, rec):
1301 def _extract_record(self, rec):
1297 """decompose a TaskRecord dict into subsection of reply for get_result"""
1302 """decompose a TaskRecord dict into subsection of reply for get_result"""
1298 io_dict = {}
1303 io_dict = {}
1299 for key in ('pyin', 'pyout', 'pyerr', 'stdout', 'stderr'):
1304 for key in ('pyin', 'pyout', 'pyerr', 'stdout', 'stderr'):
1300 io_dict[key] = rec[key]
1305 io_dict[key] = rec[key]
1301 content = {
1306 content = {
1302 'header': rec['header'],
1307 'header': rec['header'],
1303 'metadata': rec['metadata'],
1308 'metadata': rec['metadata'],
1304 'result_metadata': rec['result_metadata'],
1309 'result_metadata': rec['result_metadata'],
1305 'result_header' : rec['result_header'],
1310 'result_header' : rec['result_header'],
1306 'result_content': rec['result_content'],
1311 'result_content': rec['result_content'],
1307 'received' : rec['received'],
1312 'received' : rec['received'],
1308 'io' : io_dict,
1313 'io' : io_dict,
1309 }
1314 }
1310 if rec['result_buffers']:
1315 if rec['result_buffers']:
1311 buffers = map(bytes, rec['result_buffers'])
1316 buffers = map(bytes, rec['result_buffers'])
1312 else:
1317 else:
1313 buffers = []
1318 buffers = []
1314
1319
1315 return content, buffers
1320 return content, buffers
1316
1321
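# _extract_record returns (content, buffers): content carries the header,
# metadata, and result_* fields plus an 'io' dict of pyin/pyout/pyerr/
# stdout/stderr, ready to be placed into a result_reply keyed by msg_id.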
1317 def get_results(self, client_id, msg):
1322 def get_results(self, client_id, msg):
1318 """Get the result of 1 or more messages."""
1323 """Get the result of 1 or more messages."""
1319 content = msg['content']
1324 content = msg['content']
1320 msg_ids = sorted(set(content['msg_ids']))
1325 msg_ids = sorted(set(content['msg_ids']))
1321 statusonly = content.get('status_only', False)
1326 statusonly = content.get('status_only', False)
1322 pending = []
1327 pending = []
1323 completed = []
1328 completed = []
1324 content = dict(status='ok')
1329 content = dict(status='ok')
1325 content['pending'] = pending
1330 content['pending'] = pending
1326 content['completed'] = completed
1331 content['completed'] = completed
1327 buffers = []
1332 buffers = []
1328 if not statusonly:
1333 if not statusonly:
1329 try:
1334 try:
1330 matches = self.db.find_records(dict(msg_id={'$in':msg_ids}))
1335 matches = self.db.find_records(dict(msg_id={'$in':msg_ids}))
1331 # turn match list into dict, for faster lookup
1336 # turn match list into dict, for faster lookup
1332 records = {}
1337 records = {}
1333 for rec in matches:
1338 for rec in matches:
1334 records[rec['msg_id']] = rec
1339 records[rec['msg_id']] = rec
1335 except Exception:
1340 except Exception:
1336 content = error.wrap_exception()
1341 content = error.wrap_exception()
1337 self.session.send(self.query, "result_reply", content=content,
1342 self.session.send(self.query, "result_reply", content=content,
1338 parent=msg, ident=client_id)
1343 parent=msg, ident=client_id)
1339 return
1344 return
1340 else:
1345 else:
1341 records = {}
1346 records = {}
1342 for msg_id in msg_ids:
1347 for msg_id in msg_ids:
1343 if msg_id in self.pending:
1348 if msg_id in self.pending:
1344 pending.append(msg_id)
1349 pending.append(msg_id)
1345 elif msg_id in self.all_completed:
1350 elif msg_id in self.all_completed:
1346 completed.append(msg_id)
1351 completed.append(msg_id)
1347 if not statusonly:
1352 if not statusonly:
1348 c,bufs = self._extract_record(records[msg_id])
1353 c,bufs = self._extract_record(records[msg_id])
1349 content[msg_id] = c
1354 content[msg_id] = c
1350 buffers.extend(bufs)
1355 buffers.extend(bufs)
1351 elif msg_id in records:
1356 elif msg_id in records:
1352 if records[msg_id]['completed']:
1357 if records[msg_id]['completed']:
1353 completed.append(msg_id)
1358 completed.append(msg_id)
1354 c,bufs = self._extract_record(records[msg_id])
1359 c,bufs = self._extract_record(records[msg_id])
1355 content[msg_id] = c
1360 content[msg_id] = c
1356 buffers.extend(bufs)
1361 buffers.extend(bufs)
1357 else:
1362 else:
1358 pending.append(msg_id)
1363 pending.append(msg_id)
1359 else:
1364 else:
1360 try:
1365 try:
1361 raise KeyError('No such message: '+msg_id)
1366 raise KeyError('No such message: '+msg_id)
1362 except:
1367 except:
1363 content = error.wrap_exception()
1368 content = error.wrap_exception()
1364 break
1369 break
1365 self.session.send(self.query, "result_reply", content=content,
1370 self.session.send(self.query, "result_reply", content=content,
1366 parent=msg, ident=client_id,
1371 parent=msg, ident=client_id,
1367 buffers=buffers)
1372 buffers=buffers)
1368
1373
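# result_reply content thus always carries 'pending' and 'completed' msg_id
# lists; unless status_only was requested, each completed msg_id also maps
# to the dict from _extract_record, with raw buffers sent alongside.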
1369 def get_history(self, client_id, msg):
1374 def get_history(self, client_id, msg):
1370 """Get a list of all msg_ids in our DB records"""
1375 """Get a list of all msg_ids in our DB records"""
1371 try:
1376 try:
1372 msg_ids = self.db.get_history()
1377 msg_ids = self.db.get_history()
1373 except Exception:
1378 except Exception:
1374 content = error.wrap_exception()
1379 content = error.wrap_exception()
1375 else:
1380 else:
1376 content = dict(status='ok', history=msg_ids)
1381 content = dict(status='ok', history=msg_ids)
1377
1382
1378 self.session.send(self.query, "history_reply", content=content,
1383 self.session.send(self.query, "history_reply", content=content,
1379 parent=msg, ident=client_id)
1384 parent=msg, ident=client_id)
1380
1385
1381 def db_query(self, client_id, msg):
1386 def db_query(self, client_id, msg):
1382 """Perform a raw query on the task record database."""
1387 """Perform a raw query on the task record database."""
1383 content = msg['content']
1388 content = msg['content']
1384 query = content.get('query', {})
1389 query = content.get('query', {})
1385 keys = content.get('keys', None)
1390 keys = content.get('keys', None)
1386 buffers = []
1391 buffers = []
1387 empty = list()
1392 empty = list()
1388 try:
1393 try:
1389 records = self.db.find_records(query, keys)
1394 records = self.db.find_records(query, keys)
1390 except Exception:
1395 except Exception:
1391 content = error.wrap_exception()
1396 content = error.wrap_exception()
1392 else:
1397 else:
1393 # extract buffers from reply content:
1398 # extract buffers from reply content:
1394 if keys is not None:
1399 if keys is not None:
1395 buffer_lens = [] if 'buffers' in keys else None
1400 buffer_lens = [] if 'buffers' in keys else None
1396 result_buffer_lens = [] if 'result_buffers' in keys else None
1401 result_buffer_lens = [] if 'result_buffers' in keys else None
1397 else:
1402 else:
1398 buffer_lens = None
1403 buffer_lens = None
1399 result_buffer_lens = None
1404 result_buffer_lens = None
1400
1405
1401 for rec in records:
1406 for rec in records:
1402 # buffers may be None, so double check
1407 # buffers may be None, so double check
1403 b = rec.pop('buffers', empty) or empty
1408 b = rec.pop('buffers', empty) or empty
1404 if buffer_lens is not None:
1409 if buffer_lens is not None:
1405 buffer_lens.append(len(b))
1410 buffer_lens.append(len(b))
1406 buffers.extend(b)
1411 buffers.extend(b)
1407 rb = rec.pop('result_buffers', empty) or empty
1412 rb = rec.pop('result_buffers', empty) or empty
1408 if result_buffer_lens is not None:
1413 if result_buffer_lens is not None:
1409 result_buffer_lens.append(len(rb))
1414 result_buffer_lens.append(len(rb))
1410 buffers.extend(rb)
1415 buffers.extend(rb)
1411 content = dict(status='ok', records=records, buffer_lens=buffer_lens,
1416 content = dict(status='ok', records=records, buffer_lens=buffer_lens,
1412 result_buffer_lens=result_buffer_lens)
1417 result_buffer_lens=result_buffer_lens)
1413 # self.log.debug (content)
1418 # self.log.debug (content)
1414 self.session.send(self.query, "db_reply", content=content,
1419 self.session.send(self.query, "db_reply", content=content,
1415 parent=msg, ident=client_id,
1420 parent=msg, ident=client_id,
1416 buffers=buffers)
1421 buffers=buffers)
1417
1422
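# A minimal db_query request content, for illustration (the Mongo-style
# operators shown are ones already used above, e.g. $in and $ne):
#   {'query': {'completed': {'$ne': None}}, 'keys': ['msg_id', 'completed']}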
@@ -1,305 +1,305 b''
1 """A simple engine that talks to a controller over 0MQ.
1 """A simple engine that talks to a controller over 0MQ.
2 It handles registration, etc., and launches a kernel
2 It handles registration, etc., and launches a kernel
3 connected to the Controller's Schedulers.
3 connected to the Controller's Schedulers.
4
4
5 Authors:
5 Authors:
6
6
7 * Min RK
7 * Min RK
8 """
8 """
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10 # Copyright (C) 2010-2011 The IPython Development Team
10 # Copyright (C) 2010-2011 The IPython Development Team
11 #
11 #
12 # Distributed under the terms of the BSD License. The full license is in
12 # Distributed under the terms of the BSD License. The full license is in
13 # the file COPYING, distributed as part of this software.
13 # the file COPYING, distributed as part of this software.
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15
15
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 import sys
18 import sys
19 import time
19 import time
20 from getpass import getpass
20 from getpass import getpass
21
21
22 import zmq
22 import zmq
23 from zmq.eventloop import ioloop, zmqstream
23 from zmq.eventloop import ioloop, zmqstream
24
24
25 from IPython.external.ssh import tunnel
25 from IPython.external.ssh import tunnel
26 # internal
26 # internal
27 from IPython.utils.localinterfaces import LOCALHOST
27 from IPython.utils.localinterfaces import localhost
28 from IPython.utils.traitlets import (
28 from IPython.utils.traitlets import (
29 Instance, Dict, Integer, Type, Float, Unicode, CBytes, Bool
29 Instance, Dict, Integer, Type, Float, Unicode, CBytes, Bool
30 )
30 )
31 from IPython.utils.py3compat import cast_bytes
31 from IPython.utils.py3compat import cast_bytes
32
32
33 from IPython.parallel.controller.heartmonitor import Heart
33 from IPython.parallel.controller.heartmonitor import Heart
34 from IPython.parallel.factory import RegistrationFactory
34 from IPython.parallel.factory import RegistrationFactory
35 from IPython.parallel.util import disambiguate_url
35 from IPython.parallel.util import disambiguate_url
36
36
37 from IPython.kernel.zmq.session import Message
37 from IPython.kernel.zmq.session import Message
38 from IPython.kernel.zmq.ipkernel import Kernel
38 from IPython.kernel.zmq.ipkernel import Kernel
39 from IPython.kernel.zmq.kernelapp import IPKernelApp
39 from IPython.kernel.zmq.kernelapp import IPKernelApp
40
40
41 class EngineFactory(RegistrationFactory):
41 class EngineFactory(RegistrationFactory):
42 """IPython engine"""
42 """IPython engine"""
43
43
44 # configurables:
44 # configurables:
45 out_stream_factory=Type('IPython.kernel.zmq.iostream.OutStream', config=True,
45 out_stream_factory=Type('IPython.kernel.zmq.iostream.OutStream', config=True,
46 help="""The OutStream for handling stdout/err.
46 help="""The OutStream for handling stdout/err.
47 Typically 'IPython.kernel.zmq.iostream.OutStream'""")
47 Typically 'IPython.kernel.zmq.iostream.OutStream'""")
48 display_hook_factory=Type('IPython.kernel.zmq.displayhook.ZMQDisplayHook', config=True,
48 display_hook_factory=Type('IPython.kernel.zmq.displayhook.ZMQDisplayHook', config=True,
49 help="""The class for handling displayhook.
49 help="""The class for handling displayhook.
50 Typically 'IPython.kernel.zmq.displayhook.ZMQDisplayHook'""")
50 Typically 'IPython.kernel.zmq.displayhook.ZMQDisplayHook'""")
51 location=Unicode(config=True,
51 location=Unicode(config=True,
52 help="""The location (an IP address) of the controller. This is
52 help="""The location (an IP address) of the controller. This is
53 used for disambiguating URLs, to determine whether
53 used for disambiguating URLs, to determine whether
54 loopback should be used to connect or the public address.""")
54 loopback should be used to connect or the public address.""")
55 timeout=Float(5.0, config=True,
55 timeout=Float(5.0, config=True,
56 help="""The time (in seconds) to wait for the Controller to respond
56 help="""The time (in seconds) to wait for the Controller to respond
57 to registration requests before giving up.""")
57 to registration requests before giving up.""")
58 max_heartbeat_misses=Integer(50, config=True,
58 max_heartbeat_misses=Integer(50, config=True,
59 help="""The maximum number of times a check for the heartbeat ping of a
59 help="""The maximum number of times a check for the heartbeat ping of a
60 controller can be missed before shutting down the engine.
60 controller can be missed before shutting down the engine.
61
61
62 If set to 0, the check is disabled.""")
62 If set to 0, the check is disabled.""")
63 sshserver=Unicode(config=True,
63 sshserver=Unicode(config=True,
64 help="""The SSH server to use for tunneling connections to the Controller.""")
64 help="""The SSH server to use for tunneling connections to the Controller.""")
65 sshkey=Unicode(config=True,
65 sshkey=Unicode(config=True,
66 help="""The SSH private key file to use when tunneling connections to the Controller.""")
66 help="""The SSH private key file to use when tunneling connections to the Controller.""")
67 paramiko=Bool(sys.platform == 'win32', config=True,
67 paramiko=Bool(sys.platform == 'win32', config=True,
68 help="""Whether to use paramiko instead of openssh for tunnels.""")
68 help="""Whether to use paramiko instead of openssh for tunnels.""")
69
69
70
70
71 # not configurable:
71 # not configurable:
72 connection_info = Dict()
72 connection_info = Dict()
73 user_ns = Dict()
73 user_ns = Dict()
74 id = Integer(allow_none=True)
74 id = Integer(allow_none=True)
75 registrar = Instance('zmq.eventloop.zmqstream.ZMQStream')
75 registrar = Instance('zmq.eventloop.zmqstream.ZMQStream')
76 kernel = Instance(Kernel)
76 kernel = Instance(Kernel)
77 hb_check_period=Integer()
77 hb_check_period=Integer()
78
78
79 # States for the heartbeat monitoring
79 # States for the heartbeat monitoring
80 # Initial values for monitored and pinged must satisfy "monitored > pinged == False" so that
80 # Initial values for monitored and pinged must satisfy "monitored > pinged == False" so that
81 # during the first check no "missed" ping is reported. Must be floats for Python 3 compatibility.
81 # during the first check no "missed" ping is reported. Must be floats for Python 3 compatibility.
82 _hb_last_pinged = 0.0
82 _hb_last_pinged = 0.0
83 _hb_last_monitored = 0.0
83 _hb_last_monitored = 0.0
84 _hb_missed_beats = 0
84 _hb_missed_beats = 0
85 # The zmq Stream which receives the pings from the Heart
85 # The zmq Stream which receives the pings from the Heart
86 _hb_listener = None
86 _hb_listener = None
87
87
88 bident = CBytes()
88 bident = CBytes()
89 ident = Unicode()
89 ident = Unicode()
90 def _ident_changed(self, name, old, new):
90 def _ident_changed(self, name, old, new):
91 self.bident = cast_bytes(new)
91 self.bident = cast_bytes(new)
92 using_ssh=Bool(False)
92 using_ssh=Bool(False)
93
93
94
94
95 def __init__(self, **kwargs):
95 def __init__(self, **kwargs):
96 super(EngineFactory, self).__init__(**kwargs)
96 super(EngineFactory, self).__init__(**kwargs)
97 self.ident = self.session.session
97 self.ident = self.session.session
98
98
99 def init_connector(self):
99 def init_connector(self):
100 """construct connection function, which handles tunnels."""
100 """construct connection function, which handles tunnels."""
101 self.using_ssh = bool(self.sshkey or self.sshserver)
101 self.using_ssh = bool(self.sshkey or self.sshserver)
102
102
103 if self.sshkey and not self.sshserver:
103 if self.sshkey and not self.sshserver:
104 # We are using ssh directly to the controller, tunneling localhost to localhost
104 # We are using ssh directly to the controller, tunneling localhost to localhost
105 self.sshserver = self.url.split('://')[1].split(':')[0]
105 self.sshserver = self.url.split('://')[1].split(':')[0]
106
106
107 if self.using_ssh:
107 if self.using_ssh:
108 if tunnel.try_passwordless_ssh(self.sshserver, self.sshkey, self.paramiko):
108 if tunnel.try_passwordless_ssh(self.sshserver, self.sshkey, self.paramiko):
109 password=False
109 password=False
110 else:
110 else:
111 password = getpass("SSH Password for %s: "%self.sshserver)
111 password = getpass("SSH Password for %s: "%self.sshserver)
112 else:
112 else:
113 password = False
113 password = False
114
114
115 def connect(s, url):
115 def connect(s, url):
116 url = disambiguate_url(url, self.location)
116 url = disambiguate_url(url, self.location)
117 if self.using_ssh:
117 if self.using_ssh:
118 self.log.debug("Tunneling connection to %s via %s", url, self.sshserver)
118 self.log.debug("Tunneling connection to %s via %s", url, self.sshserver)
119 return tunnel.tunnel_connection(s, url, self.sshserver,
119 return tunnel.tunnel_connection(s, url, self.sshserver,
120 keyfile=self.sshkey, paramiko=self.paramiko,
120 keyfile=self.sshkey, paramiko=self.paramiko,
121 password=password,
121 password=password,
122 )
122 )
123 else:
123 else:
124 return s.connect(url)
124 return s.connect(url)
125
125
126 def maybe_tunnel(url):
126 def maybe_tunnel(url):
127 """like connect, but don't complete the connection (for use by heartbeat)"""
127 """like connect, but don't complete the connection (for use by heartbeat)"""
128 url = disambiguate_url(url, self.location)
128 url = disambiguate_url(url, self.location)
129 if self.using_ssh:
129 if self.using_ssh:
130 self.log.debug("Tunneling connection to %s via %s", url, self.sshserver)
130 self.log.debug("Tunneling connection to %s via %s", url, self.sshserver)
131 url,tunnelobj = tunnel.open_tunnel(url, self.sshserver,
131 url,tunnelobj = tunnel.open_tunnel(url, self.sshserver,
132 keyfile=self.sshkey, paramiko=self.paramiko,
132 keyfile=self.sshkey, paramiko=self.paramiko,
133 password=password,
133 password=password,
134 )
134 )
135 return str(url)
135 return str(url)
136 return connect, maybe_tunnel
136 return connect, maybe_tunnel
137
137
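# Typical use, as in register() and complete_registration() below; both
# callables share the SSH settings captured here:
#   connect, maybe_tunnel = self.init_connector()
#   connect(sock, url)          # full connection, tunneled when using_ssh
#   new_url = maybe_tunnel(url) # open the tunnel only; the Heart connects later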
138 def register(self):
138 def register(self):
139 """send the registration_request"""
139 """send the registration_request"""
140
140
141 self.log.info("Registering with controller at %s", self.url)
141 self.log.info("Registering with controller at %s", self.url)
142 ctx = self.context
142 ctx = self.context
143 connect,maybe_tunnel = self.init_connector()
143 connect,maybe_tunnel = self.init_connector()
144 reg = ctx.socket(zmq.DEALER)
144 reg = ctx.socket(zmq.DEALER)
145 reg.setsockopt(zmq.IDENTITY, self.bident)
145 reg.setsockopt(zmq.IDENTITY, self.bident)
146 connect(reg, self.url)
146 connect(reg, self.url)
147 self.registrar = zmqstream.ZMQStream(reg, self.loop)
147 self.registrar = zmqstream.ZMQStream(reg, self.loop)
148
148
149
149
150 content = dict(uuid=self.ident)
150 content = dict(uuid=self.ident)
151 self.registrar.on_recv(lambda msg: self.complete_registration(msg, connect, maybe_tunnel))
151 self.registrar.on_recv(lambda msg: self.complete_registration(msg, connect, maybe_tunnel))
152 # print (self.session.key)
152 # print (self.session.key)
153 self.session.send(self.registrar, "registration_request", content=content)
153 self.session.send(self.registrar, "registration_request", content=content)
154
154
155 def _report_ping(self, msg):
155 def _report_ping(self, msg):
156 """Callback for when the heartmonitor.Heart receives a ping"""
156 """Callback for when the heartmonitor.Heart receives a ping"""
157 #self.log.debug("Received a ping: %s", msg)
157 #self.log.debug("Received a ping: %s", msg)
158 self._hb_last_pinged = time.time()
158 self._hb_last_pinged = time.time()
159
159
160 def complete_registration(self, msg, connect, maybe_tunnel):
160 def complete_registration(self, msg, connect, maybe_tunnel):
161 # print msg
161 # print msg
162 self._abort_dc.stop()
162 self._abort_dc.stop()
163 ctx = self.context
163 ctx = self.context
164 loop = self.loop
164 loop = self.loop
165 identity = self.bident
165 identity = self.bident
166 idents,msg = self.session.feed_identities(msg)
166 idents,msg = self.session.feed_identities(msg)
167 msg = self.session.unserialize(msg)
167 msg = self.session.unserialize(msg)
168 content = msg['content']
168 content = msg['content']
169 info = self.connection_info
169 info = self.connection_info
170
170
171 def url(key):
171 def url(key):
172 """get zmq url for given channel"""
172 """get zmq url for given channel"""
173 return str(info["interface"] + ":%i" % info[key])
173 return str(info["interface"] + ":%i" % info[key])
174
174
175 if content['status'] == 'ok':
175 if content['status'] == 'ok':
176 self.id = int(content['id'])
176 self.id = int(content['id'])
177
177
178 # launch heartbeat
178 # launch heartbeat
179 # possibly forward hb ports with tunnels
179 # possibly forward hb ports with tunnels
180 hb_ping = maybe_tunnel(url('hb_ping'))
180 hb_ping = maybe_tunnel(url('hb_ping'))
181 hb_pong = maybe_tunnel(url('hb_pong'))
181 hb_pong = maybe_tunnel(url('hb_pong'))
182
182
183 hb_monitor = None
183 hb_monitor = None
184 if self.max_heartbeat_misses > 0:
184 if self.max_heartbeat_misses > 0:
185 # Add a monitor socket which will record the last time a ping was seen
185 # Add a monitor socket which will record the last time a ping was seen
186 mon = self.context.socket(zmq.SUB)
186 mon = self.context.socket(zmq.SUB)
187 mport = mon.bind_to_random_port('tcp://%s' % LOCALHOST)
187 mport = mon.bind_to_random_port('tcp://%s' % localhost())
188 mon.setsockopt(zmq.SUBSCRIBE, b"")
188 mon.setsockopt(zmq.SUBSCRIBE, b"")
189 self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
189 self._hb_listener = zmqstream.ZMQStream(mon, self.loop)
190 self._hb_listener.on_recv(self._report_ping)
190 self._hb_listener.on_recv(self._report_ping)
191
191
192
192
193 hb_monitor = "tcp://%s:%i" % (LOCALHOST, mport)
193 hb_monitor = "tcp://%s:%i" % (localhost(), mport)
194
194
195 heart = Heart(hb_ping, hb_pong, hb_monitor, heart_id=identity)
195 heart = Heart(hb_ping, hb_pong, hb_monitor, heart_id=identity)
196 heart.start()
196 heart.start()
197
197
198 # create Shell Connections (MUX, Task, etc.):
198 # create Shell Connections (MUX, Task, etc.):
199 shell_addrs = url('mux'), url('task')
199 shell_addrs = url('mux'), url('task')
200
200
201 # Use only one shell stream for mux and tasks
201 # Use only one shell stream for mux and tasks
202 stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
202 stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
203 stream.setsockopt(zmq.IDENTITY, identity)
203 stream.setsockopt(zmq.IDENTITY, identity)
204 shell_streams = [stream]
204 shell_streams = [stream]
205 for addr in shell_addrs:
205 for addr in shell_addrs:
206 connect(stream, addr)
206 connect(stream, addr)
207
207
208 # control stream:
208 # control stream:
209 control_addr = url('control')
209 control_addr = url('control')
210 control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
210 control_stream = zmqstream.ZMQStream(ctx.socket(zmq.ROUTER), loop)
211 control_stream.setsockopt(zmq.IDENTITY, identity)
211 control_stream.setsockopt(zmq.IDENTITY, identity)
212 connect(control_stream, control_addr)
212 connect(control_stream, control_addr)
213
213
214 # create iopub stream:
214 # create iopub stream:
215 iopub_addr = url('iopub')
215 iopub_addr = url('iopub')
216 iopub_socket = ctx.socket(zmq.PUB)
216 iopub_socket = ctx.socket(zmq.PUB)
217 iopub_socket.setsockopt(zmq.IDENTITY, identity)
217 iopub_socket.setsockopt(zmq.IDENTITY, identity)
218 connect(iopub_socket, iopub_addr)
218 connect(iopub_socket, iopub_addr)
219
219
220 # disable history:
220 # disable history:
221 self.config.HistoryManager.hist_file = ':memory:'
221 self.config.HistoryManager.hist_file = ':memory:'
222
222
223 # Redirect input streams and set a display hook.
223 # Redirect input streams and set a display hook.
224 if self.out_stream_factory:
224 if self.out_stream_factory:
225 sys.stdout = self.out_stream_factory(self.session, iopub_socket, u'stdout')
225 sys.stdout = self.out_stream_factory(self.session, iopub_socket, u'stdout')
226 sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
226 sys.stdout.topic = cast_bytes('engine.%i.stdout' % self.id)
227 sys.stderr = self.out_stream_factory(self.session, iopub_socket, u'stderr')
227 sys.stderr = self.out_stream_factory(self.session, iopub_socket, u'stderr')
228 sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
228 sys.stderr.topic = cast_bytes('engine.%i.stderr' % self.id)
229 if self.display_hook_factory:
229 if self.display_hook_factory:
230 sys.displayhook = self.display_hook_factory(self.session, iopub_socket)
230 sys.displayhook = self.display_hook_factory(self.session, iopub_socket)
231 sys.displayhook.topic = cast_bytes('engine.%i.pyout' % self.id)
231 sys.displayhook.topic = cast_bytes('engine.%i.pyout' % self.id)
232
232
233 self.kernel = Kernel(parent=self, int_id=self.id, ident=self.ident, session=self.session,
233 self.kernel = Kernel(parent=self, int_id=self.id, ident=self.ident, session=self.session,
234 control_stream=control_stream, shell_streams=shell_streams, iopub_socket=iopub_socket,
234 control_stream=control_stream, shell_streams=shell_streams, iopub_socket=iopub_socket,
235 loop=loop, user_ns=self.user_ns, log=self.log)
235 loop=loop, user_ns=self.user_ns, log=self.log)
236
236
237 self.kernel.shell.display_pub.topic = cast_bytes('engine.%i.displaypub' % self.id)
237 self.kernel.shell.display_pub.topic = cast_bytes('engine.%i.displaypub' % self.id)
238
238
239
239
240 # periodically check the heartbeat pings of the controller
240 # periodically check the heartbeat pings of the controller
241 # Should be started here and not in "start()" so that the right period can be taken
241 # Should be started here and not in "start()" so that the right period can be taken
242 # from the hub's HeartBeatMonitor.period
242 # from the hub's HeartBeatMonitor.period
243 if self.max_heartbeat_misses > 0:
243 if self.max_heartbeat_misses > 0:
244 # Use a slightly longer check period than the hub's signal period so we don't warn unnecessarily
244 # Use a slightly longer check period than the hub's signal period so we don't warn unnecessarily
245 self.hb_check_period = int(content['hb_period'])+10
245 self.hb_check_period = int(content['hb_period'])+10
246 self.log.info("Starting to monitor the heartbeat signal from the hub every %i ms.", self.hb_check_period)
246 self.log.info("Starting to monitor the heartbeat signal from the hub every %i ms.", self.hb_check_period)
247 self._hb_reporter = ioloop.PeriodicCallback(self._hb_monitor, self.hb_check_period, self.loop)
247 self._hb_reporter = ioloop.PeriodicCallback(self._hb_monitor, self.hb_check_period, self.loop)
248 self._hb_reporter.start()
248 self._hb_reporter.start()
249 else:
249 else:
250 self.log.info("Monitoring of the heartbeat signal from the hub is not enabled.")
250 self.log.info("Monitoring of the heartbeat signal from the hub is not enabled.")
251
251
252
252
253 # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
253 # FIXME: This is a hack until IPKernelApp and IPEngineApp can be fully merged
254 app = IPKernelApp(parent=self, shell=self.kernel.shell, kernel=self.kernel, log=self.log)
254 app = IPKernelApp(parent=self, shell=self.kernel.shell, kernel=self.kernel, log=self.log)
255 app.init_profile_dir()
255 app.init_profile_dir()
256 app.init_code()
256 app.init_code()
257
257
258 self.kernel.start()
258 self.kernel.start()
259 else:
259 else:
260 self.log.fatal("Registration Failed: %s"%msg)
260 self.log.fatal("Registration Failed: %s"%msg)
261 raise Exception("Registration Failed: %s"%msg)
261 raise Exception("Registration Failed: %s"%msg)
262
262
263 self.log.info("Completed registration with id %i"%self.id)
263 self.log.info("Completed registration with id %i"%self.id)
264
264
265
265
266 def abort(self):
266 def abort(self):
267 self.log.fatal("Registration timed out after %.1f seconds"%self.timeout)
267 self.log.fatal("Registration timed out after %.1f seconds"%self.timeout)
268 if self.url.startswith('127.'):
268 if self.url.startswith('127.'):
269 self.log.fatal("""
269 self.log.fatal("""
270 If the controller and engines are not on the same machine,
270 If the controller and engines are not on the same machine,
271 you will have to instruct the controller to listen on an external IP (in ipcontroller_config.py):
271 you will have to instruct the controller to listen on an external IP (in ipcontroller_config.py):
272 c.HubFactory.ip='*' # for all interfaces, internal and external
272 c.HubFactory.ip='*' # for all interfaces, internal and external
273 c.HubFactory.ip='192.168.1.101' # or any interface that the engines can see
273 c.HubFactory.ip='192.168.1.101' # or any interface that the engines can see
274 or tunnel connections via ssh.
274 or tunnel connections via ssh.
275 """)
275 """)
276 self.session.send(self.registrar, "unregistration_request", content=dict(id=self.id))
276 self.session.send(self.registrar, "unregistration_request", content=dict(id=self.id))
277 time.sleep(1)
277 time.sleep(1)
278 sys.exit(255)
278 sys.exit(255)
279
279
280 def _hb_monitor(self):
280 def _hb_monitor(self):
281 """Callback to monitor the heartbeat from the controller"""
281 """Callback to monitor the heartbeat from the controller"""
282 self._hb_listener.flush()
282 self._hb_listener.flush()
283 if self._hb_last_monitored > self._hb_last_pinged:
283 if self._hb_last_monitored > self._hb_last_pinged:
284 self._hb_missed_beats += 1
284 self._hb_missed_beats += 1
285 self.log.warn("No heartbeat in the last %s ms (%s time(s) in a row).", self.hb_check_period, self._hb_missed_beats)
285 self.log.warn("No heartbeat in the last %s ms (%s time(s) in a row).", self.hb_check_period, self._hb_missed_beats)
286 else:
286 else:
287 #self.log.debug("Heartbeat received (after missing %s beats).", self._hb_missed_beats)
287 #self.log.debug("Heartbeat received (after missing %s beats).", self._hb_missed_beats)
288 self._hb_missed_beats = 0
288 self._hb_missed_beats = 0
289
289
290 if self._hb_missed_beats >= self.max_heartbeat_misses:
290 if self._hb_missed_beats >= self.max_heartbeat_misses:
291 self.log.fatal("Maximum number of heartbeat misses reached (%s times %s ms), shutting down.",
291 self.log.fatal("Maximum number of heartbeat misses reached (%s times %s ms), shutting down.",
292 self.max_heartbeat_misses, self.hb_check_period)
292 self.max_heartbeat_misses, self.hb_check_period)
293 self.session.send(self.registrar, "unregistration_request", content=dict(id=self.id))
293 self.session.send(self.registrar, "unregistration_request", content=dict(id=self.id))
294 self.loop.stop()
294 self.loop.stop()
295
295
296 self._hb_last_monitored = time.time()
296 self._hb_last_monitored = time.time()
297
297
298
298
299 def start(self):
299 def start(self):
300 dc = ioloop.DelayedCallback(self.register, 0, self.loop)
300 dc = ioloop.DelayedCallback(self.register, 0, self.loop)
301 dc.start()
301 dc.start()
302 self._abort_dc = ioloop.DelayedCallback(self.abort, self.timeout*1000, self.loop)
302 self._abort_dc = ioloop.DelayedCallback(self.abort, self.timeout*1000, self.loop)
303 self._abort_dc.start()
303 self._abort_dc.start()
304
304
305
305
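# --- Editor's sketch (not part of the diff): the miss-counting logic of
# _hb_monitor above, modeled standalone. Names below are hypothetical.
def count_misses(last_pinged, last_monitored, missed):
    """One monitor tick: bump the miss counter, or reset it on a fresh ping."""
    if last_monitored > last_pinged:
        return missed + 1   # nothing arrived since the previous check
    return 0                # a ping arrived; reset

missed = 0
for pinged, monitored in [(1.0, 2.0), (1.0, 3.0), (4.0, 3.5)]:
    missed = count_misses(pinged, monitored, missed)
    print("missed=%i shutdown=%s" % (missed, missed >= 2))
# missed=1 shutdown=False / missed=2 shutdown=True / missed=0 shutdown=False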
@@ -1,79 +1,81 b''
1 """Base config factories.
1 """Base config factories.
2
2
3 Authors:
3 Authors:
4
4
5 * Min RK
5 * Min RK
6 """
6 """
7
7
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9 # Copyright (C) 2010-2011 The IPython Development Team
9 # Copyright (C) 2010-2011 The IPython Development Team
10 #
10 #
11 # Distributed under the terms of the BSD License. The full license is in
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
12 # the file COPYING, distributed as part of this software.
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14
14
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 # Imports
16 # Imports
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18
18
19
19
20 import logging
20 import logging
21 import os
21 import os
22
22
23 import zmq
23 import zmq
24 from zmq.eventloop.ioloop import IOLoop
24 from zmq.eventloop.ioloop import IOLoop
25
25
26 from IPython.config.configurable import Configurable
26 from IPython.config.configurable import Configurable
27 from IPython.utils.localinterfaces import LOCALHOST
27 from IPython.utils.localinterfaces import localhost
28 from IPython.utils.traitlets import Integer, Instance, Unicode
28 from IPython.utils.traitlets import Integer, Instance, Unicode
29
29
30 from IPython.parallel.util import select_random_ports
30 from IPython.parallel.util import select_random_ports
31 from IPython.kernel.zmq.session import Session, SessionFactory
31 from IPython.kernel.zmq.session import Session, SessionFactory
32
32
33 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
34 # Classes
34 # Classes
35 #-----------------------------------------------------------------------------
35 #-----------------------------------------------------------------------------
36
36
37
37
38 class RegistrationFactory(SessionFactory):
38 class RegistrationFactory(SessionFactory):
39 """The Base Configurable for objects that involve registration."""
39 """The Base Configurable for objects that involve registration."""
40
40
41 url = Unicode('', config=True,
41 url = Unicode('', config=True,
42 help="""The 0MQ url used for registration. This sets transport, ip, and port
42 help="""The 0MQ url used for registration. This sets transport, ip, and port
43 in one variable. For example: url='tcp://%s:12345' or
43 in one variable. For example: url='tcp://127.0.0.1:12345' or
44 url='epgm://*:90210'"""
44 url='epgm://*:90210'"""
45 % LOCALHOST) # url takes precedence over ip,regport,transport
45 ) # url takes precedence over ip,regport,transport
46 transport = Unicode('tcp', config=True,
46 transport = Unicode('tcp', config=True,
47 help="""The 0MQ transport for communications. This will likely be
47 help="""The 0MQ transport for communications. This will likely be
48 the default of 'tcp', but other values include 'ipc', 'epgm', 'inproc'.""")
48 the default of 'tcp', but other values include 'ipc', 'epgm', 'inproc'.""")
49 ip = Unicode(LOCALHOST, config=True,
49 ip = Unicode(config=True,
50 help="""The IP address for registration. This is generally either
50 help="""The IP address for registration. This is generally either
51 '127.0.0.1' for loopback only or '*' for all interfaces.
51 '127.0.0.1' for loopback only or '*' for all interfaces.
52 [default: '%s']""" % LOCALHOST)
52 """)
53 def _ip_default(self):
54 return localhost()
53 regport = Integer(config=True,
55 regport = Integer(config=True,
54 help="""The port on which the Hub listens for registration.""")
56 help="""The port on which the Hub listens for registration.""")
55 def _regport_default(self):
57 def _regport_default(self):
56 return select_random_ports(1)[0]
58 return select_random_ports(1)[0]
57
59
58 def __init__(self, **kwargs):
60 def __init__(self, **kwargs):
59 super(RegistrationFactory, self).__init__(**kwargs)
61 super(RegistrationFactory, self).__init__(**kwargs)
60 self._propagate_url()
62 self._propagate_url()
61 self._rebuild_url()
63 self._rebuild_url()
62 self.on_trait_change(self._propagate_url, 'url')
64 self.on_trait_change(self._propagate_url, 'url')
63 self.on_trait_change(self._rebuild_url, 'ip')
65 self.on_trait_change(self._rebuild_url, 'ip')
64 self.on_trait_change(self._rebuild_url, 'transport')
66 self.on_trait_change(self._rebuild_url, 'transport')
65 self.on_trait_change(self._rebuild_url, 'regport')
67 self.on_trait_change(self._rebuild_url, 'regport')
66
68
67 def _rebuild_url(self):
69 def _rebuild_url(self):
68 self.url = "%s://%s:%i"%(self.transport, self.ip, self.regport)
70 self.url = "%s://%s:%i"%(self.transport, self.ip, self.regport)
69
71
70 def _propagate_url(self):
72 def _propagate_url(self):
71 """Ensure self.url contains full transport://interface:port"""
73 """Ensure self.url contains full transport://interface:port"""
72 if self.url:
74 if self.url:
73 iface = self.url.split('://',1)
75 iface = self.url.split('://',1)
74 if len(iface) == 2:
76 if len(iface) == 2:
75 self.transport,iface = iface
77 self.transport,iface = iface
76 iface = iface.split(':')
78 iface = iface.split(':')
77 self.ip = iface[0]
79 self.ip = iface[0]
78 if iface[1]:
80 if iface[1]:
79 self.regport = int(iface[1])
81 self.regport = int(iface[1])
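# --- Editor's sketch (not part of the diff): the url round trip performed by
# _propagate_url/_rebuild_url above, in isolation. Helper names are hypothetical.
def split_reg_url(url):
    """'tcp://127.0.0.1:12345' -> ('tcp', '127.0.0.1', 12345)"""
    transport, iface = url.split('://', 1)
    ip, port = iface.split(':')
    return transport, ip, int(port)

def join_reg_url(transport, ip, port):
    return "%s://%s:%i" % (transport, ip, port)

assert join_reg_url(*split_reg_url("tcp://127.0.0.1:12345")) == "tcp://127.0.0.1:12345"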
@@ -1,368 +1,368 b''
1 """some generic utilities for dealing with classes, urls, and serialization
1 """some generic utilities for dealing with classes, urls, and serialization
2
2
3 Authors:
3 Authors:
4
4
5 * Min RK
5 * Min RK
6 """
6 """
7 #-----------------------------------------------------------------------------
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2010-2011 The IPython Development Team
8 # Copyright (C) 2010-2011 The IPython Development Team
9 #
9 #
10 # Distributed under the terms of the BSD License. The full license is in
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17
17
18 # Standard library imports.
18 # Standard library imports.
19 import logging
19 import logging
20 import os
20 import os
21 import re
21 import re
22 import stat
22 import stat
23 import socket
23 import socket
24 import sys
24 import sys
25 from signal import signal, SIGINT, SIGABRT, SIGTERM
25 from signal import signal, SIGINT, SIGABRT, SIGTERM
26 try:
26 try:
27 from signal import SIGKILL
27 from signal import SIGKILL
28 except ImportError:
28 except ImportError:
29 SIGKILL=None
29 SIGKILL=None
30
30
31 try:
31 try:
32 import cPickle
32 import cPickle
33 pickle = cPickle
33 pickle = cPickle
34 except ImportError:
34 except ImportError:
35 cPickle = None
35 cPickle = None
36 import pickle
36 import pickle
37
37
38 # System library imports
38 # System library imports
39 import zmq
39 import zmq
40 from zmq.log import handlers
40 from zmq.log import handlers
41
41
42 from IPython.external.decorator import decorator
42 from IPython.external.decorator import decorator
43
43
44 # IPython imports
44 # IPython imports
45 from IPython.config.application import Application
45 from IPython.config.application import Application
46 from IPython.utils.localinterfaces import LOCALHOST, PUBLIC_IPS
46 from IPython.utils.localinterfaces import localhost, is_public_ip, public_ips
47 from IPython.kernel.zmq.log import EnginePUBHandler
47 from IPython.kernel.zmq.log import EnginePUBHandler
48 from IPython.kernel.zmq.serialize import (
48 from IPython.kernel.zmq.serialize import (
49 unserialize_object, serialize_object, pack_apply_message, unpack_apply_message
49 unserialize_object, serialize_object, pack_apply_message, unpack_apply_message
50 )
50 )
51
51
52 #-----------------------------------------------------------------------------
52 #-----------------------------------------------------------------------------
53 # Classes
53 # Classes
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
55
55
56 class Namespace(dict):
56 class Namespace(dict):
57 """Subclass of dict for attribute access to keys."""
57 """Subclass of dict for attribute access to keys."""
58
58
59 def __getattr__(self, key):
59 def __getattr__(self, key):
60 """getattr aliased to getitem"""
60 """getattr aliased to getitem"""
61 if key in self.iterkeys():
61 if key in self.iterkeys():
62 return self[key]
62 return self[key]
63 else:
63 else:
64 raise NameError(key)
64 raise NameError(key)
65
65
66 def __setattr__(self, key, value):
66 def __setattr__(self, key, value):
67 """setattr aliased to setitem, with strict"""
67 """setattr aliased to setitem, with strict"""
68 if hasattr(dict, key):
68 if hasattr(dict, key):
69 raise KeyError("Cannot override dict keys %r"%key)
69 raise KeyError("Cannot override dict keys %r"%key)
70 self[key] = value
70 self[key] = value
71
71
72
72
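# --- Editor's sketch (not part of the diff): Namespace in use. Keys double as
# attributes; missing attributes raise NameError (not AttributeError), and
# names that would shadow dict attributes are rejected. Python 2 only, since
# __getattr__ relies on iterkeys().
ns = Namespace(a=1)
ns.b = 2
assert ns.a == 1 and ns['b'] == 2
try:
    ns.pop = 'nope'
except KeyError:
    pass   # 'pop' is a dict attribute, so assignment is refused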
73 class ReverseDict(dict):
73 class ReverseDict(dict):
74 """simple double-keyed subset of dict methods."""
74 """simple double-keyed subset of dict methods."""
75
75
76 def __init__(self, *args, **kwargs):
76 def __init__(self, *args, **kwargs):
77 dict.__init__(self, *args, **kwargs)
77 dict.__init__(self, *args, **kwargs)
78 self._reverse = dict()
78 self._reverse = dict()
79 for key, value in self.iteritems():
79 for key, value in self.iteritems():
80 self._reverse[value] = key
80 self._reverse[value] = key
81
81
82 def __getitem__(self, key):
82 def __getitem__(self, key):
83 try:
83 try:
84 return dict.__getitem__(self, key)
84 return dict.__getitem__(self, key)
85 except KeyError:
85 except KeyError:
86 return self._reverse[key]
86 return self._reverse[key]
87
87
88 def __setitem__(self, key, value):
88 def __setitem__(self, key, value):
89 if key in self._reverse:
89 if key in self._reverse:
90 raise KeyError("Can't have key %r on both sides!"%key)
90 raise KeyError("Can't have key %r on both sides!"%key)
91 dict.__setitem__(self, key, value)
91 dict.__setitem__(self, key, value)
92 self._reverse[value] = key
92 self._reverse[value] = key
93
93
94 def pop(self, key):
94 def pop(self, key):
95 value = dict.pop(self, key)
95 value = dict.pop(self, key)
96 self._reverse.pop(value)
96 self._reverse.pop(value)
97 return value
97 return value
98
98
99 def get(self, key, default=None):
99 def get(self, key, default=None):
100 try:
100 try:
101 return self[key]
101 return self[key]
102 except KeyError:
102 except KeyError:
103 return default
103 return default
104
104
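# --- Editor's sketch (not part of the diff): ReverseDict in use. Lookups work
# by key or by value, and pop() keeps both directions in sync.
rd = ReverseDict()
rd['mux'] = 12345
assert rd['mux'] == 12345     # forward lookup
assert rd[12345] == 'mux'     # reverse lookup
rd.pop('mux')
assert rd.get(12345) is None  # both mappings cleared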
105 #-----------------------------------------------------------------------------
105 #-----------------------------------------------------------------------------
106 # Functions
106 # Functions
107 #-----------------------------------------------------------------------------
107 #-----------------------------------------------------------------------------
108
108
109 @decorator
109 @decorator
110 def log_errors(f, self, *args, **kwargs):
110 def log_errors(f, self, *args, **kwargs):
111 """decorator to log unhandled exceptions raised in a method.
111 """decorator to log unhandled exceptions raised in a method.
112
112
113 For use wrapping on_recv callbacks, so that exceptions
113 For use wrapping on_recv callbacks, so that exceptions
114 do not cause the stream to be closed.
114 do not cause the stream to be closed.
115 """
115 """
116 try:
116 try:
117 return f(self, *args, **kwargs)
117 return f(self, *args, **kwargs)
118 except Exception:
118 except Exception:
119 self.log.error("Uncaught exception in %r" % f, exc_info=True)
119 self.log.error("Uncaught exception in %r" % f, exc_info=True)
120
120
121
121
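# --- Editor's sketch (not part of the diff): log_errors in use. Any object
# with a .log attribute can shield its callbacks; the exception is logged with
# a traceback instead of propagating and closing the stream. The Handler class
# here is hypothetical.
import logging
logging.basicConfig()

class Handler(object):
    log = logging.getLogger("example")

    @log_errors
    def on_recv(self, msg):
        raise ValueError(msg)

Handler().on_recv("boom")   # logs "Uncaught exception in ..." and returns None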
122 def is_url(url):
122 def is_url(url):
123 """boolean check for whether a string is a zmq url"""
123 """boolean check for whether a string is a zmq url"""
124 if '://' not in url:
124 if '://' not in url:
125 return False
125 return False
126 proto, addr = url.split('://', 1)
126 proto, addr = url.split('://', 1)
127 if proto.lower() not in ['tcp','pgm','epgm','ipc','inproc']:
127 if proto.lower() not in ['tcp','pgm','epgm','ipc','inproc']:
128 return False
128 return False
129 return True
129 return True
130
130
131 def validate_url(url):
131 def validate_url(url):
132 """validate a url for zeromq"""
132 """validate a url for zeromq"""
133 if not isinstance(url, basestring):
133 if not isinstance(url, basestring):
134 raise TypeError("url must be a string, not %r"%type(url))
134 raise TypeError("url must be a string, not %r"%type(url))
135 url = url.lower()
135 url = url.lower()
136
136
137 proto_addr = url.split('://')
137 proto_addr = url.split('://')
138 assert len(proto_addr) == 2, 'Invalid url: %r'%url
138 assert len(proto_addr) == 2, 'Invalid url: %r'%url
139 proto, addr = proto_addr
139 proto, addr = proto_addr
140 assert proto in ['tcp','pgm','epgm','ipc','inproc'], "Invalid protocol: %r"%proto
140 assert proto in ['tcp','pgm','epgm','ipc','inproc'], "Invalid protocol: %r"%proto
141
141
142 # domain pattern adapted from http://www.regexlib.com/REDetails.aspx?regexp_id=391
142 # domain pattern adapted from http://www.regexlib.com/REDetails.aspx?regexp_id=391
143 # author: Remi Sabourin
143 # author: Remi Sabourin
144 pat = re.compile(r'^([\w\d]([\w\d\-]{0,61}[\w\d])?\.)*[\w\d]([\w\d\-]{0,61}[\w\d])?$')
144 pat = re.compile(r'^([\w\d]([\w\d\-]{0,61}[\w\d])?\.)*[\w\d]([\w\d\-]{0,61}[\w\d])?$')
145
145
146 if proto == 'tcp':
146 if proto == 'tcp':
147 lis = addr.split(':')
147 lis = addr.split(':')
148 assert len(lis) == 2, 'Invalid url: %r'%url
148 assert len(lis) == 2, 'Invalid url: %r'%url
149 addr,s_port = lis
149 addr,s_port = lis
150 try:
150 try:
151 port = int(s_port)
151 port = int(s_port)
152 except ValueError:
152 except ValueError:
153 raise AssertionError("Invalid port %r in url: %r" % (s_port, url))
153 raise AssertionError("Invalid port %r in url: %r" % (s_port, url))
154
154
155 assert addr == '*' or pat.match(addr) is not None, 'Invalid url: %r'%url
155 assert addr == '*' or pat.match(addr) is not None, 'Invalid url: %r'%url
156
156
157 else:
157 else:
158 # only validate tcp urls currently
158 # only validate tcp urls currently
159 pass
159 pass
160
160
161 return True
161 return True
162
162
163
163
164 def validate_url_container(container):
164 def validate_url_container(container):
165 """validate a potentially nested collection of urls."""
165 """validate a potentially nested collection of urls."""
166 if isinstance(container, basestring):
166 if isinstance(container, basestring):
167 url = container
167 url = container
168 return validate_url(url)
168 return validate_url(url)
169 elif isinstance(container, dict):
169 elif isinstance(container, dict):
170 container = container.itervalues()
170 container = container.itervalues()
171
171
172 for element in container:
172 for element in container:
173 validate_url_container(element)
173 validate_url_container(element)
174
174
175
175
176 def split_url(url):
176 def split_url(url):
177 """split a zmq url (tcp://ip:port) into ('tcp','ip','port')."""
177 """split a zmq url (tcp://ip:port) into ('tcp','ip','port')."""
178 proto_addr = url.split('://')
178 proto_addr = url.split('://')
179 assert len(proto_addr) == 2, 'Invalid url: %r'%url
179 assert len(proto_addr) == 2, 'Invalid url: %r'%url
180 proto, addr = proto_addr
180 proto, addr = proto_addr
181 lis = addr.split(':')
181 lis = addr.split(':')
182 assert len(lis) == 2, 'Invalid url: %r'%url
182 assert len(lis) == 2, 'Invalid url: %r'%url
183 addr,s_port = lis
183 addr,s_port = lis
184 return proto,addr,s_port
184 return proto,addr,s_port
185
185
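# --- Editor's sketch (not part of the diff): the url helpers above, exercised.
assert is_url("tcp://127.0.0.1:10101")
assert not is_url("127.0.0.1:10101")   # no transport prefix
assert split_url("tcp://127.0.0.1:10101") == ("tcp", "127.0.0.1", "10101")
validate_url("tcp://127.0.0.1:10101")  # raises AssertionError if malformed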
186 def disambiguate_ip_address(ip, location=None):
186 def disambiguate_ip_address(ip, location=None):
187 """turn multi-ip interfaces '0.0.0.0' and '*' into connectable
187 """turn multi-ip interfaces '0.0.0.0' and '*' into connectable
188 ones, based on the location (default interpretation of location is localhost)."""
188 ones, based on the location (default interpretation of location is localhost)."""
189 if ip in ('0.0.0.0', '*'):
189 if ip in ('0.0.0.0', '*'):
190 if location is None or location in PUBLIC_IPS or not PUBLIC_IPS:
190 if location is None or is_public_ip(location) or not public_ips():
191 # If location is unspecified or cannot be determined, assume local
191 # If location is unspecified or cannot be determined, assume local
192 ip = LOCALHOST
192 ip = localhost()
193 elif location:
193 elif location:
194 return location
194 return location
195 return ip
195 return ip
196
196
197 def disambiguate_url(url, location=None):
197 def disambiguate_url(url, location=None):
198 """turn multi-ip interfaces '0.0.0.0' and '*' into connectable
198 """turn multi-ip interfaces '0.0.0.0' and '*' into connectable
199 ones, based on the location (default interpretation is localhost).
199 ones, based on the location (default interpretation is localhost).
200
200
201 This is for zeromq urls, such as tcp://*:10101."""
201 This is for zeromq urls, such as tcp://*:10101."""
202 try:
202 try:
203 proto,ip,port = split_url(url)
203 proto,ip,port = split_url(url)
204 except AssertionError:
204 except AssertionError:
205 # probably not tcp url; could be ipc, etc.
205 # probably not tcp url; could be ipc, etc.
206 return url
206 return url
207
207
208 ip = disambiguate_ip_address(ip,location)
208 ip = disambiguate_ip_address(ip,location)
209
209
210 return "%s://%s:%s"%(proto,ip,port)
210 return "%s://%s:%s"%(proto,ip,port)
211
211
212
212
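# --- Editor's sketch (not part of the diff): disambiguate_url in use. A
# bind-style url such as tcp://*:10101 is not connectable as-is; with no
# location it typically falls back to loopback, and with a remote location it
# usually substitutes that address. Non-tcp urls pass through untouched.
print(disambiguate_url("tcp://*:10101"))                 # e.g. tcp://127.0.0.1:10101
print(disambiguate_url("tcp://*:10101", "192.168.1.5"))  # e.g. tcp://192.168.1.5:10101
print(disambiguate_url("ipc:///tmp/registration"))       # unchanged: not tcp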
213 #--------------------------------------------------------------------------
213 #--------------------------------------------------------------------------
214 # helpers for implementing old MEC API via view.apply
214 # helpers for implementing old MEC API via view.apply
215 #--------------------------------------------------------------------------
215 #--------------------------------------------------------------------------
216
216
217 def interactive(f):
217 def interactive(f):
218 """decorator for making functions appear as interactively defined.
218 """decorator for making functions appear as interactively defined.
219 This results in the function being linked to the user_ns as globals()
219 This results in the function being linked to the user_ns as globals()
220 instead of the module globals().
220 instead of the module globals().
221 """
221 """
222 f.__module__ = '__main__'
222 f.__module__ = '__main__'
223 return f
223 return f
224
224
225 @interactive
225 @interactive
226 def _push(**ns):
226 def _push(**ns):
227 """helper method for implementing `client.push` via `client.apply`"""
227 """helper method for implementing `client.push` via `client.apply`"""
228 user_ns = globals()
228 user_ns = globals()
229 tmp = '_IP_PUSH_TMP_'
229 tmp = '_IP_PUSH_TMP_'
230 while tmp in user_ns:
230 while tmp in user_ns:
231 tmp = tmp + '_'
231 tmp = tmp + '_'
232 try:
232 try:
233 for name, value in ns.iteritems():
233 for name, value in ns.iteritems():
234 user_ns[tmp] = value
234 user_ns[tmp] = value
235 exec "%s = %s" % (name, tmp) in user_ns
235 exec "%s = %s" % (name, tmp) in user_ns
236 finally:
236 finally:
237 user_ns.pop(tmp, None)
237 user_ns.pop(tmp, None)
238
238
239 @interactive
239 @interactive
240 def _pull(keys):
240 def _pull(keys):
241 """helper method for implementing `client.pull` via `client.apply`"""
241 """helper method for implementing `client.pull` via `client.apply`"""
242 if isinstance(keys, (list,tuple, set)):
242 if isinstance(keys, (list,tuple, set)):
243 return map(lambda key: eval(key, globals()), keys)
243 return map(lambda key: eval(key, globals()), keys)
244 else:
244 else:
245 return eval(keys, globals())
245 return eval(keys, globals())
246
246
247 @interactive
247 @interactive
248 def _execute(code):
248 def _execute(code):
249 """helper method for implementing `client.execute` via `client.apply`"""
249 """helper method for implementing `client.execute` via `client.apply`"""
250 exec code in globals()
250 exec code in globals()
251
251
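# --- Editor's sketch (not part of the diff): the temp-name trick used by
# _push above, modeled with the exec(code, ns) form that runs on both
# Python 2 and 3.
ns = {}
for name, value in {'x': 1, 'y': 2}.items():
    ns['_tmp_'] = value
    exec("%s = %s" % (name, '_tmp_'), ns)  # bind the value under its real name
    ns.pop('_tmp_', None)
assert ns['x'] == 1 and ns['y'] == 2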
252 #--------------------------------------------------------------------------
252 #--------------------------------------------------------------------------
253 # extra process management utilities
253 # extra process management utilities
254 #--------------------------------------------------------------------------
254 #--------------------------------------------------------------------------
255
255
256 _random_ports = set()
256 _random_ports = set()
257
257
258 def select_random_ports(n):
258 def select_random_ports(n):
259 """Selects and return n random ports that are available."""
259 """Selects and return n random ports that are available."""
260 ports = []
260 ports = []
261 for i in xrange(n):
261 for i in xrange(n):
262 sock = socket.socket()
262 sock = socket.socket()
263 sock.bind(('', 0))
263 sock.bind(('', 0))
264 while sock.getsockname()[1] in _random_ports:
264 while sock.getsockname()[1] in _random_ports:
265 sock.close()
265 sock.close()
266 sock = socket.socket()
266 sock = socket.socket()
267 sock.bind(('', 0))
267 sock.bind(('', 0))
268 ports.append(sock)
268 ports.append(sock)
269 for i, sock in enumerate(ports):
269 for i, sock in enumerate(ports):
270 port = sock.getsockname()[1]
270 port = sock.getsockname()[1]
271 sock.close()
271 sock.close()
272 ports[i] = port
272 ports[i] = port
273 _random_ports.add(port)
273 _random_ports.add(port)
274 return ports
274 return ports
275
275
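# --- Editor's sketch (not part of the diff): select_random_ports in use.
ports = select_random_ports(3)
assert len(ports) == 3 and len(set(ports)) == 3   # three distinct ports
# Note the sockets are closed before returning, so another process can still
# grab a port before it is used; _random_ports only prevents in-process reuse.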
276 def signal_children(children):
276 def signal_children(children):
277 """Relay interupt/term signals to children, for more solid process cleanup."""
277 """Relay interupt/term signals to children, for more solid process cleanup."""
278 def terminate_children(sig, frame):
278 def terminate_children(sig, frame):
279 log = Application.instance().log
279 log = Application.instance().log
280 log.critical("Got signal %i, terminating children..."%sig)
280 log.critical("Got signal %i, terminating children..."%sig)
281 for child in children:
281 for child in children:
282 child.terminate()
282 child.terminate()
283
283
284 sys.exit(sig != SIGINT)
284 sys.exit(sig != SIGINT)
285 # sys.exit(sig)
285 # sys.exit(sig)
286 for sig in (SIGINT, SIGABRT, SIGTERM):
286 for sig in (SIGINT, SIGABRT, SIGTERM):
287 signal(sig, terminate_children)
287 signal(sig, terminate_children)
288
288
289 def generate_exec_key(keyfile):
289 def generate_exec_key(keyfile):
290 import uuid
290 import uuid
291 newkey = str(uuid.uuid4())
291 newkey = str(uuid.uuid4())
292 with open(keyfile, 'w') as f:
292 with open(keyfile, 'w') as f:
293 # f.write('ipython-key ')
293 # f.write('ipython-key ')
294 f.write(newkey+'\n')
294 f.write(newkey+'\n')
295 # set user-only RW permissions (0600)
295 # set user-only RW permissions (0600)
296 # this will have no effect on Windows
296 # this will have no effect on Windows
297 os.chmod(keyfile, stat.S_IRUSR|stat.S_IWUSR)
297 os.chmod(keyfile, stat.S_IRUSR|stat.S_IWUSR)
298
298
299
299
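# --- Editor's sketch (not part of the diff): generate_exec_key in use; the
# tempfile path is illustrative. The permission check applies on POSIX only.
import os, tempfile
keyfile = os.path.join(tempfile.mkdtemp(), 'exec_key')
generate_exec_key(keyfile)
with open(keyfile) as f:
    assert len(f.read().strip()) == 36   # a uuid4 string
assert oct(os.stat(keyfile).st_mode & 0o777).endswith('600')   # user-only RW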
300 def integer_loglevel(loglevel):
300 def integer_loglevel(loglevel):
301 try:
301 try:
302 loglevel = int(loglevel)
302 loglevel = int(loglevel)
303 except ValueError:
303 except ValueError:
304 if isinstance(loglevel, str):
304 if isinstance(loglevel, str):
305 loglevel = getattr(logging, loglevel)
305 loglevel = getattr(logging, loglevel)
306 return loglevel
306 return loglevel
307
307
308 def connect_logger(logname, context, iface, root="ip", loglevel=logging.DEBUG):
308 def connect_logger(logname, context, iface, root="ip", loglevel=logging.DEBUG):
309 logger = logging.getLogger(logname)
309 logger = logging.getLogger(logname)
310 if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]):
310 if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]):
311 # don't add a second PUBHandler
311 # don't add a second PUBHandler
312 return
312 return
313 loglevel = integer_loglevel(loglevel)
313 loglevel = integer_loglevel(loglevel)
314 lsock = context.socket(zmq.PUB)
314 lsock = context.socket(zmq.PUB)
315 lsock.connect(iface)
315 lsock.connect(iface)
316 handler = handlers.PUBHandler(lsock)
316 handler = handlers.PUBHandler(lsock)
317 handler.setLevel(loglevel)
317 handler.setLevel(loglevel)
318 handler.root_topic = root
318 handler.root_topic = root
319 logger.addHandler(handler)
319 logger.addHandler(handler)
320 logger.setLevel(loglevel)
320 logger.setLevel(loglevel)
321
321
322 def connect_engine_logger(context, iface, engine, loglevel=logging.DEBUG):
322 def connect_engine_logger(context, iface, engine, loglevel=logging.DEBUG):
323 logger = logging.getLogger()
323 logger = logging.getLogger()
324 if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]):
324 if any([isinstance(h, handlers.PUBHandler) for h in logger.handlers]):
325 # don't add a second PUBHandler
325 # don't add a second PUBHandler
326 return
326 return
327 loglevel = integer_loglevel(loglevel)
327 loglevel = integer_loglevel(loglevel)
328 lsock = context.socket(zmq.PUB)
328 lsock = context.socket(zmq.PUB)
329 lsock.connect(iface)
329 lsock.connect(iface)
330 handler = EnginePUBHandler(engine, lsock)
330 handler = EnginePUBHandler(engine, lsock)
331 handler.setLevel(loglevel)
331 handler.setLevel(loglevel)
332 logger.addHandler(handler)
332 logger.addHandler(handler)
333 logger.setLevel(loglevel)
333 logger.setLevel(loglevel)
334 return logger
334 return logger
335
335
336 def local_logger(logname, loglevel=logging.DEBUG):
336 def local_logger(logname, loglevel=logging.DEBUG):
337 loglevel = integer_loglevel(loglevel)
337 loglevel = integer_loglevel(loglevel)
338 logger = logging.getLogger(logname)
338 logger = logging.getLogger(logname)
339 if any([isinstance(h, logging.StreamHandler) for h in logger.handlers]):
339 if any([isinstance(h, logging.StreamHandler) for h in logger.handlers]):
340 # don't add a second StreamHandler
340 # don't add a second StreamHandler
341 return
341 return
342 handler = logging.StreamHandler()
342 handler = logging.StreamHandler()
343 handler.setLevel(loglevel)
343 handler.setLevel(loglevel)
344 formatter = logging.Formatter("%(asctime)s.%(msecs).03d [%(name)s] %(message)s",
344 formatter = logging.Formatter("%(asctime)s.%(msecs).03d [%(name)s] %(message)s",
345 datefmt="%Y-%m-%d %H:%M:%S")
345 datefmt="%Y-%m-%d %H:%M:%S")
346 handler.setFormatter(formatter)
346 handler.setFormatter(formatter)
347
347
348 logger.addHandler(handler)
348 logger.addHandler(handler)
349 logger.setLevel(loglevel)
349 logger.setLevel(loglevel)
350 return logger
350 return logger
351
351
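# --- Editor's sketch (not part of the diff): local_logger in use; the logger
# name is hypothetical. Note the early return on repeat calls yields None,
# so keep the reference from the first call.
log = local_logger('ipengine.demo', 'INFO')
log.info("engine ready")   # prints e.g. 2013-07-01 12:00:00.000 [ipengine.demo] engine ready
assert local_logger('ipengine.demo') is None   # StreamHandler already attached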
352 def set_hwm(sock, hwm=0):
352 def set_hwm(sock, hwm=0):
353 """set zmq High Water Mark on a socket
353 """set zmq High Water Mark on a socket
354
354
355 in a way that always works for various pyzmq / libzmq versions.
355 in a way that always works for various pyzmq / libzmq versions.
356 """
356 """
357 import zmq
357 import zmq
358
358
359 for key in ('HWM', 'SNDHWM', 'RCVHWM'):
359 for key in ('HWM', 'SNDHWM', 'RCVHWM'):
360 opt = getattr(zmq, key, None)
360 opt = getattr(zmq, key, None)
361 if opt is None:
361 if opt is None:
362 continue
362 continue
363 try:
363 try:
364 sock.setsockopt(opt, hwm)
364 sock.setsockopt(opt, hwm)
365 except zmq.ZMQError:
365 except zmq.ZMQError:
366 pass
366 pass
367
367
368
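# --- Editor's sketch (not part of the diff): set_hwm in use. On libzmq >= 3
# it sets SNDHWM/RCVHWM; on libzmq 2 it falls back to the single HWM option.
import zmq
ctx = zmq.Context()
sock = ctx.socket(zmq.PUB)
set_hwm(sock, 64)
sock.close()
ctx.term()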
@@ -1,382 +1,382 b''
1 """ A minimal application using the Qt console-style IPython frontend.
1 """ A minimal application using the Qt console-style IPython frontend.
2
2
3 This is not a complete console app, as subprocess will not be able to receive
3 This is not a complete console app, as subprocess will not be able to receive
4 input, there is no real readline support, among other limitations.
4 input, there is no real readline support, among other limitations.
5
5
6 Authors:
6 Authors:
7
7
8 * Evan Patterson
8 * Evan Patterson
9 * Min RK
9 * Min RK
10 * Erik Tollerud
10 * Erik Tollerud
11 * Fernando Perez
11 * Fernando Perez
12 * Bussonnier Matthias
12 * Bussonnier Matthias
13 * Thomas Kluyver
13 * Thomas Kluyver
14 * Paul Ivanov
14 * Paul Ivanov
15
15
16 """
16 """
17
17
18 #-----------------------------------------------------------------------------
18 #-----------------------------------------------------------------------------
19 # Imports
19 # Imports
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21
21
22 # stdlib imports
22 # stdlib imports
23 import os
23 import os
24 import signal
24 import signal
25 import sys
25 import sys
26
26
27 # If run on Windows, install an exception hook which pops up a
27 # If run on Windows, install an exception hook which pops up a
28 # message box. Pythonw.exe hides the console, so without this
28 # message box. Pythonw.exe hides the console, so without this
29 # the application silently fails to load.
29 # the application silently fails to load.
30 #
30 #
31 # We always install this handler, because the expectation is for
31 # We always install this handler, because the expectation is for
32 # qtconsole to bring up a GUI even if called from the console.
32 # qtconsole to bring up a GUI even if called from the console.
33 # The old handler is called, so the exception is printed as well.
33 # The old handler is called, so the exception is printed as well.
34 # If desired, check for pythonw with an additional condition
34 # If desired, check for pythonw with an additional condition
35 # (sys.executable.lower().find('pythonw.exe') >= 0).
35 # (sys.executable.lower().find('pythonw.exe') >= 0).
36 if os.name == 'nt':
36 if os.name == 'nt':
37 old_excepthook = sys.excepthook
37 old_excepthook = sys.excepthook
38
38
39 def gui_excepthook(exctype, value, tb):
39 def gui_excepthook(exctype, value, tb):
40 try:
40 try:
41 import ctypes, traceback
41 import ctypes, traceback
42 MB_ICONERROR = 0x00000010L
42 MB_ICONERROR = 0x00000010L
43 title = u'Error starting IPython QtConsole'
43 title = u'Error starting IPython QtConsole'
44 msg = u''.join(traceback.format_exception(exctype, value, tb))
44 msg = u''.join(traceback.format_exception(exctype, value, tb))
45 ctypes.windll.user32.MessageBoxW(0, msg, title, MB_ICONERROR)
45 ctypes.windll.user32.MessageBoxW(0, msg, title, MB_ICONERROR)
46 finally:
46 finally:
47 # Also call the old exception hook to let it do
47 # Also call the old exception hook to let it do
48 # its thing too.
48 # its thing too.
49 old_excepthook(exctype, value, tb)
49 old_excepthook(exctype, value, tb)
50
50
51 sys.excepthook = gui_excepthook
51 sys.excepthook = gui_excepthook
52
52
53 # System library imports
53 # System library imports
54 from IPython.external.qt import QtCore, QtGui
54 from IPython.external.qt import QtCore, QtGui
55
55
56 # Local imports
56 # Local imports
57 from IPython.config.application import catch_config_error
57 from IPython.config.application import catch_config_error
58 from IPython.core.application import BaseIPythonApplication
58 from IPython.core.application import BaseIPythonApplication
59 from IPython.qt.console.ipython_widget import IPythonWidget
59 from IPython.qt.console.ipython_widget import IPythonWidget
60 from IPython.qt.console.rich_ipython_widget import RichIPythonWidget
60 from IPython.qt.console.rich_ipython_widget import RichIPythonWidget
61 from IPython.qt.console import styles
61 from IPython.qt.console import styles
62 from IPython.qt.console.mainwindow import MainWindow
62 from IPython.qt.console.mainwindow import MainWindow
63 from IPython.qt.client import QtKernelClient
63 from IPython.qt.client import QtKernelClient
64 from IPython.qt.manager import QtKernelManager
64 from IPython.qt.manager import QtKernelManager
65 from IPython.utils.traitlets import (
65 from IPython.utils.traitlets import (
66 Dict, Unicode, CBool, Any
66 Dict, Unicode, CBool, Any
67 )
67 )
68
68
69 from IPython.consoleapp import (
69 from IPython.consoleapp import (
70 IPythonConsoleApp, app_aliases, app_flags, flags, aliases
70 IPythonConsoleApp, app_aliases, app_flags, flags, aliases
71 )
71 )
72
72
73 #-----------------------------------------------------------------------------
73 #-----------------------------------------------------------------------------
74 # Network Constants
74 # Network Constants
75 #-----------------------------------------------------------------------------
75 #-----------------------------------------------------------------------------
76
76
77 from IPython.utils.localinterfaces import LOCALHOST, LOCAL_IPS
77 from IPython.utils.localinterfaces import is_local_ip
78
78
79 #-----------------------------------------------------------------------------
79 #-----------------------------------------------------------------------------
80 # Globals
80 # Globals
81 #-----------------------------------------------------------------------------
81 #-----------------------------------------------------------------------------
82
82
83 _examples = """
83 _examples = """
84 ipython qtconsole # start the qtconsole
84 ipython qtconsole # start the qtconsole
85 ipython qtconsole --matplotlib=inline # start with matplotlib inline plotting mode
85 ipython qtconsole --matplotlib=inline # start with matplotlib inline plotting mode
86 """
86 """
87
87
88 #-----------------------------------------------------------------------------
88 #-----------------------------------------------------------------------------
89 # Aliases and Flags
89 # Aliases and Flags
90 #-----------------------------------------------------------------------------
90 #-----------------------------------------------------------------------------
91
91
92 # start with copy of flags
92 # start with copy of flags
93 flags = dict(flags)
93 flags = dict(flags)
94 qt_flags = {
94 qt_flags = {
95 'plain' : ({'IPythonQtConsoleApp' : {'plain' : True}},
95 'plain' : ({'IPythonQtConsoleApp' : {'plain' : True}},
96 "Disable rich text support."),
96 "Disable rich text support."),
97 }
97 }
98
98
99 # and app_flags from the Console Mixin
99 # and app_flags from the Console Mixin
100 qt_flags.update(app_flags)
100 qt_flags.update(app_flags)
101 # add frontend flags to the full set
101 # add frontend flags to the full set
102 flags.update(qt_flags)
102 flags.update(qt_flags)
103
103
104 # start with copy of front&backend aliases list
104 # start with copy of front&backend aliases list
105 aliases = dict(aliases)
105 aliases = dict(aliases)
106 qt_aliases = dict(
106 qt_aliases = dict(
107 style = 'IPythonWidget.syntax_style',
107 style = 'IPythonWidget.syntax_style',
108 stylesheet = 'IPythonQtConsoleApp.stylesheet',
108 stylesheet = 'IPythonQtConsoleApp.stylesheet',
109 colors = 'ZMQInteractiveShell.colors',
109 colors = 'ZMQInteractiveShell.colors',
110
110
111 editor = 'IPythonWidget.editor',
111 editor = 'IPythonWidget.editor',
112 paging = 'ConsoleWidget.paging',
112 paging = 'ConsoleWidget.paging',
113 )
113 )
114 # and app_aliases from the Console Mixin
114 # and app_aliases from the Console Mixin
115 qt_aliases.update(app_aliases)
115 qt_aliases.update(app_aliases)
116 qt_aliases.update({'gui-completion':'ConsoleWidget.gui_completion'})
116 qt_aliases.update({'gui-completion':'ConsoleWidget.gui_completion'})
117 # add frontend aliases to the full set
117 # add frontend aliases to the full set
118 aliases.update(qt_aliases)
118 aliases.update(qt_aliases)
119
119
120 # get flags&aliases into sets, and remove a couple that
120 # get flags&aliases into sets, and remove a couple that
121 # shouldn't be scrubbed from backend flags:
121 # shouldn't be scrubbed from backend flags:
122 qt_aliases = set(qt_aliases.keys())
122 qt_aliases = set(qt_aliases.keys())
123 qt_aliases.remove('colors')
123 qt_aliases.remove('colors')
124 qt_flags = set(qt_flags.keys())
124 qt_flags = set(qt_flags.keys())
125
125
126 #-----------------------------------------------------------------------------
126 #-----------------------------------------------------------------------------
127 # Classes
127 # Classes
128 #-----------------------------------------------------------------------------
128 #-----------------------------------------------------------------------------
129
129
130 #-----------------------------------------------------------------------------
130 #-----------------------------------------------------------------------------
131 # IPythonQtConsole
131 # IPythonQtConsole
132 #-----------------------------------------------------------------------------
132 #-----------------------------------------------------------------------------
133
133
134
134
135 class IPythonQtConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
135 class IPythonQtConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
136 name = 'ipython-qtconsole'
136 name = 'ipython-qtconsole'
137
137
138 description = """
138 description = """
139 The IPython QtConsole.
139 The IPython QtConsole.
140
140
141 This launches a Console-style application using Qt. It is not a full
141 This launches a Console-style application using Qt. It is not a full
142 console, in that launched terminal subprocesses will not be able to accept
142 console, in that launched terminal subprocesses will not be able to accept
143 input.
143 input.
144
144
145 The QtConsole supports various extra features beyond the Terminal IPython
145 The QtConsole supports various extra features beyond the Terminal IPython
146 shell, such as inline plotting with matplotlib, via:
146 shell, such as inline plotting with matplotlib, via:
147
147
148 ipython qtconsole --matplotlib=inline
148 ipython qtconsole --matplotlib=inline
149
149
150 as well as saving your session as HTML, and printing the output.
150 as well as saving your session as HTML, and printing the output.
151
151
152 """
152 """
153 examples = _examples
153 examples = _examples
154
154
155 classes = [IPythonWidget] + IPythonConsoleApp.classes
155 classes = [IPythonWidget] + IPythonConsoleApp.classes
156 flags = Dict(flags)
156 flags = Dict(flags)
157 aliases = Dict(aliases)
157 aliases = Dict(aliases)
158 frontend_flags = Any(qt_flags)
158 frontend_flags = Any(qt_flags)
159 frontend_aliases = Any(qt_aliases)
159 frontend_aliases = Any(qt_aliases)
160 kernel_client_class = QtKernelClient
160 kernel_client_class = QtKernelClient
161 kernel_manager_class = QtKernelManager
161 kernel_manager_class = QtKernelManager
162
162
163 stylesheet = Unicode('', config=True,
163 stylesheet = Unicode('', config=True,
164 help="path to a custom CSS stylesheet")
164 help="path to a custom CSS stylesheet")
165
165
166 hide_menubar = CBool(False, config=True,
166 hide_menubar = CBool(False, config=True,
167 help="Start the console window with the menu bar hidden.")
167 help="Start the console window with the menu bar hidden.")
168
168
169 maximize = CBool(False, config=True,
169 maximize = CBool(False, config=True,
170 help="Start the console window maximized.")
170 help="Start the console window maximized.")
171
171
172 plain = CBool(False, config=True,
172 plain = CBool(False, config=True,
173 help="Use a plaintext widget instead of rich text (plain can't print/save).")
173 help="Use a plaintext widget instead of rich text (plain can't print/save).")
174
174
175 def _plain_changed(self, name, old, new):
175 def _plain_changed(self, name, old, new):
176 kind = 'plain' if new else 'rich'
176 kind = 'plain' if new else 'rich'
177 self.config.ConsoleWidget.kind = kind
177 self.config.ConsoleWidget.kind = kind
178 if new:
178 if new:
179 self.widget_factory = IPythonWidget
179 self.widget_factory = IPythonWidget
180 else:
180 else:
181 self.widget_factory = RichIPythonWidget
181 self.widget_factory = RichIPythonWidget
182
182
183 # the factory for creating a widget
183 # the factory for creating a widget
184 widget_factory = Any(RichIPythonWidget)
184 widget_factory = Any(RichIPythonWidget)
185
185
186 def parse_command_line(self, argv=None):
186 def parse_command_line(self, argv=None):
187 super(IPythonQtConsoleApp, self).parse_command_line(argv)
187 super(IPythonQtConsoleApp, self).parse_command_line(argv)
188 self.build_kernel_argv(argv)
188 self.build_kernel_argv(argv)
189
189
190
190
191 def new_frontend_master(self):
191 def new_frontend_master(self):
192 """ Create and return new frontend attached to new kernel, launched on localhost.
192 """ Create and return new frontend attached to new kernel, launched on localhost.
193 """
193 """
194 kernel_manager = self.kernel_manager_class(
194 kernel_manager = self.kernel_manager_class(
195 connection_file=self._new_connection_file(),
195 connection_file=self._new_connection_file(),
196 parent=self,
196 parent=self,
197 autorestart=True,
197 autorestart=True,
198 )
198 )
199 # start the kernel
199 # start the kernel
200 kwargs = dict()
200 kwargs = dict()
201 kwargs['extra_arguments'] = self.kernel_argv
201 kwargs['extra_arguments'] = self.kernel_argv
202 kernel_manager.start_kernel(**kwargs)
202 kernel_manager.start_kernel(**kwargs)
203 kernel_manager.client_factory = self.kernel_client_class
203 kernel_manager.client_factory = self.kernel_client_class
204 kernel_client = kernel_manager.client()
204 kernel_client = kernel_manager.client()
205 kernel_client.start_channels(shell=True, iopub=True)
205 kernel_client.start_channels(shell=True, iopub=True)
206 widget = self.widget_factory(config=self.config,
206 widget = self.widget_factory(config=self.config,
207 local_kernel=True)
207 local_kernel=True)
208 self.init_colors(widget)
208 self.init_colors(widget)
209 widget.kernel_manager = kernel_manager
209 widget.kernel_manager = kernel_manager
210 widget.kernel_client = kernel_client
210 widget.kernel_client = kernel_client
211 widget._existing = False
211 widget._existing = False
212 widget._may_close = True
212 widget._may_close = True
213 widget._confirm_exit = self.confirm_exit
213 widget._confirm_exit = self.confirm_exit
214 return widget
214 return widget
215
215
216 def new_frontend_slave(self, current_widget):
216 def new_frontend_slave(self, current_widget):
217 """Create and return a new frontend attached to an existing kernel.
217 """Create and return a new frontend attached to an existing kernel.
218
218
219 Parameters
219 Parameters
220 ----------
220 ----------
221 current_widget : IPythonWidget
221 current_widget : IPythonWidget
222 The IPythonWidget whose kernel this frontend is to share
222 The IPythonWidget whose kernel this frontend is to share
223 """
223 """
224 kernel_client = self.kernel_client_class(
224 kernel_client = self.kernel_client_class(
225 connection_file=current_widget.kernel_client.connection_file,
225 connection_file=current_widget.kernel_client.connection_file,
226 config = self.config,
226 config = self.config,
227 )
227 )
228 kernel_client.load_connection_file()
228 kernel_client.load_connection_file()
229 kernel_client.start_channels()
229 kernel_client.start_channels()
230 widget = self.widget_factory(config=self.config,
230 widget = self.widget_factory(config=self.config,
231 local_kernel=False)
231 local_kernel=False)
232 self.init_colors(widget)
232 self.init_colors(widget)
233 widget._existing = True
233 widget._existing = True
234 widget._may_close = False
234 widget._may_close = False
235 widget._confirm_exit = False
235 widget._confirm_exit = False
236 widget.kernel_client = kernel_client
236 widget.kernel_client = kernel_client
237 widget.kernel_manager = current_widget.kernel_manager
237 widget.kernel_manager = current_widget.kernel_manager
238 return widget
238 return widget
239
239
240 def init_qt_app(self):
240 def init_qt_app(self):
241 # separate from qt_elements, because it must run first
241 # separate from qt_elements, because it must run first
242 self.app = QtGui.QApplication([])
242 self.app = QtGui.QApplication([])
243
243
244 def init_qt_elements(self):
244 def init_qt_elements(self):
245 # Create the widget.
245 # Create the widget.
246
246
247 base_path = os.path.abspath(os.path.dirname(__file__))
247 base_path = os.path.abspath(os.path.dirname(__file__))
248 icon_path = os.path.join(base_path, 'resources', 'icon', 'IPythonConsole.svg')
248 icon_path = os.path.join(base_path, 'resources', 'icon', 'IPythonConsole.svg')
249 self.app.icon = QtGui.QIcon(icon_path)
249 self.app.icon = QtGui.QIcon(icon_path)
250 QtGui.QApplication.setWindowIcon(self.app.icon)
250 QtGui.QApplication.setWindowIcon(self.app.icon)
251
251
252 ip = self.ip
252 ip = self.ip
253 local_kernel = (not self.existing) or ip in LOCAL_IPS
253 local_kernel = (not self.existing) or is_local_ip(ip)
254 self.widget = self.widget_factory(config=self.config,
254 self.widget = self.widget_factory(config=self.config,
255 local_kernel=local_kernel)
255 local_kernel=local_kernel)
256 self.init_colors(self.widget)
256 self.init_colors(self.widget)
257 self.widget._existing = self.existing
257 self.widget._existing = self.existing
258 self.widget._may_close = not self.existing
258 self.widget._may_close = not self.existing
259 self.widget._confirm_exit = self.confirm_exit
259 self.widget._confirm_exit = self.confirm_exit
260
260
261 self.widget.kernel_manager = self.kernel_manager
261 self.widget.kernel_manager = self.kernel_manager
262 self.widget.kernel_client = self.kernel_client
262 self.widget.kernel_client = self.kernel_client
263 self.window = MainWindow(self.app,
263 self.window = MainWindow(self.app,
264 confirm_exit=self.confirm_exit,
264 confirm_exit=self.confirm_exit,
265 new_frontend_factory=self.new_frontend_master,
265 new_frontend_factory=self.new_frontend_master,
266 slave_frontend_factory=self.new_frontend_slave,
266 slave_frontend_factory=self.new_frontend_slave,
267 )
267 )
268 self.window.log = self.log
268 self.window.log = self.log
269 self.window.add_tab_with_frontend(self.widget)
269 self.window.add_tab_with_frontend(self.widget)
270 self.window.init_menu_bar()
270 self.window.init_menu_bar()
271
271
272 # Ignore on OSX, where there is always a menu bar
272 # Ignore on OSX, where there is always a menu bar
273 if sys.platform != 'darwin' and self.hide_menubar:
273 if sys.platform != 'darwin' and self.hide_menubar:
274 self.window.menuBar().setVisible(False)
274 self.window.menuBar().setVisible(False)
275
275
276 self.window.setWindowTitle('IPython')
276 self.window.setWindowTitle('IPython')
277
277
278 def init_colors(self, widget):
278 def init_colors(self, widget):
279 """Configure the coloring of the widget"""
279 """Configure the coloring of the widget"""
280 # Note: This will be dramatically simplified when colors
280 # Note: This will be dramatically simplified when colors
281 # are removed from the backend.
281 # are removed from the backend.
282
282
283 # parse the colors arg down to current known labels
283 # parse the colors arg down to current known labels
284 try:
284 try:
285 colors = self.config.ZMQInteractiveShell.colors
285 colors = self.config.ZMQInteractiveShell.colors
286 except AttributeError:
286 except AttributeError:
287 colors = None
287 colors = None
288 try:
288 try:
289 style = self.config.IPythonWidget.syntax_style
289 style = self.config.IPythonWidget.syntax_style
290 except AttributeError:
290 except AttributeError:
291 style = None
291 style = None
292 try:
292 try:
293 sheet = self.config.IPythonWidget.style_sheet
293 sheet = self.config.IPythonWidget.style_sheet
294 except AttributeError:
294 except AttributeError:
295 sheet = None
295 sheet = None
296
296
297 # find the value for colors:
297 # find the value for colors:
298 if colors:
298 if colors:
299 colors=colors.lower()
299 colors=colors.lower()
300 if colors in ('lightbg', 'light'):
300 if colors in ('lightbg', 'light'):
301 colors='lightbg'
301 colors='lightbg'
302 elif colors in ('dark', 'linux'):
302 elif colors in ('dark', 'linux'):
303 colors='linux'
303 colors='linux'
304 else:
304 else:
305 colors='nocolor'
305 colors='nocolor'
306 elif style:
306 elif style:
307 if style=='bw':
307 if style=='bw':
308 colors='nocolor'
308 colors='nocolor'
309 elif styles.dark_style(style):
309 elif styles.dark_style(style):
310 colors='linux'
310 colors='linux'
311 else:
311 else:
312 colors='lightbg'
312 colors='lightbg'
313 else:
313 else:
314 colors=None
314 colors=None
315
315
316 # Configure the style
316 # Configure the style
317 if style:
317 if style:
318 widget.style_sheet = styles.sheet_from_template(style, colors)
318 widget.style_sheet = styles.sheet_from_template(style, colors)
319 widget.syntax_style = style
319 widget.syntax_style = style
320 widget._syntax_style_changed()
320 widget._syntax_style_changed()
321 widget._style_sheet_changed()
321 widget._style_sheet_changed()
322 elif colors:
322 elif colors:
323 # use a default dark/light/bw style
323 # use a default dark/light/bw style
324 widget.set_default_style(colors=colors)
324 widget.set_default_style(colors=colors)
325
325
326 if self.stylesheet:
326 if self.stylesheet:
327 # we got an explicit stylesheet
327 # we got an explicit stylesheet
328 if os.path.isfile(self.stylesheet):
328 if os.path.isfile(self.stylesheet):
329 with open(self.stylesheet) as f:
329 with open(self.stylesheet) as f:
330 sheet = f.read()
330 sheet = f.read()
331 else:
331 else:
332 raise IOError("Stylesheet %r not found." % self.stylesheet)
332 raise IOError("Stylesheet %r not found." % self.stylesheet)
333 if sheet:
333 if sheet:
334 widget.style_sheet = sheet
334 widget.style_sheet = sheet
335 widget._style_sheet_changed()
335 widget._style_sheet_changed()
336
336
337
337
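The precedence encoded above: an explicit ZMQInteractiveShell.colors setting wins, then the syntax style's dark/light classification, then no preference at all. The same resolution as a standalone helper (resolve_colors is a hypothetical name; styles.dark_style is the classifier the method itself uses, assumed importable as below):

from IPython.qt.console import styles

def resolve_colors(colors=None, style=None):
    # mirrors the branches of init_colors
    if colors:
        colors = colors.lower()
        if colors in ('lightbg', 'light'):
            return 'lightbg'
        if colors in ('dark', 'linux'):
            return 'linux'
        return 'nocolor'
    if style:
        if style == 'bw':
            return 'nocolor'
        return 'linux' if styles.dark_style(style) else 'lightbg'
    return None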
338 def init_signal(self):
338 def init_signal(self):
339 """allow clean shutdown on sigint"""
339 """allow clean shutdown on sigint"""
340 signal.signal(signal.SIGINT, lambda sig, frame: self.exit(-2))
340 signal.signal(signal.SIGINT, lambda sig, frame: self.exit(-2))
341 # need a timer, so that QApplication doesn't block until a real
341 # need a timer, so that QApplication doesn't block until a real
342 # Qt event fires (can require mouse movement)
342 # Qt event fires (can require mouse movement)
343 # timer trick from http://stackoverflow.com/q/4938723/938949
343 # timer trick from http://stackoverflow.com/q/4938723/938949
344 timer = QtCore.QTimer()
344 timer = QtCore.QTimer()
345 # Let the interpreter run each 200 ms:
345 # Let the interpreter run each 200 ms:
346 timer.timeout.connect(lambda: None)
346 timer.timeout.connect(lambda: None)
347 timer.start(200)
347 timer.start(200)
348 # hold onto ref, so the timer doesn't get cleaned up
348 # hold onto ref, so the timer doesn't get cleaned up
349 self._sigint_timer = timer
349 self._sigint_timer = timer
350
350
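The timer trick is worth seeing on its own: while Qt's event loop is idle, the Python interpreter never runs, so a handler registered with the signal module gets no chance to fire until some real Qt event (often a mouse movement) wakes things up. A minimal sketch (assuming PyQt4):

import signal
from PyQt4 import QtCore, QtGui

app = QtGui.QApplication([])
signal.signal(signal.SIGINT, lambda sig, frame: app.quit())
timer = QtCore.QTimer()
timer.timeout.connect(lambda: None)   # give the interpreter a turn every 200 ms
timer.start(200)
app.exec_()                           # Ctrl-C now exits promptly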
351 @catch_config_error
351 @catch_config_error
352 def initialize(self, argv=None):
352 def initialize(self, argv=None):
353 self.init_qt_app()
353 self.init_qt_app()
354 super(IPythonQtConsoleApp, self).initialize(argv)
354 super(IPythonQtConsoleApp, self).initialize(argv)
355 IPythonConsoleApp.initialize(self,argv)
355 IPythonConsoleApp.initialize(self,argv)
356 self.init_qt_elements()
356 self.init_qt_elements()
357 self.init_signal()
357 self.init_signal()
358
358
359 def start(self):
359 def start(self):
360
360
361 # draw the window
361 # draw the window
362 if self.maximize:
362 if self.maximize:
363 self.window.showMaximized()
363 self.window.showMaximized()
364 else:
364 else:
365 self.window.show()
365 self.window.show()
366 self.window.raise_()
366 self.window.raise_()
367
367
368 # Start the application main loop.
368 # Start the application main loop.
369 self.app.exec_()
369 self.app.exec_()
370
370
371 #-----------------------------------------------------------------------------
371 #-----------------------------------------------------------------------------
372 # Main entry point
372 # Main entry point
373 #-----------------------------------------------------------------------------
373 #-----------------------------------------------------------------------------
374
374
375 def main():
375 def main():
376 app = IPythonQtConsoleApp()
376 app = IPythonQtConsoleApp()
377 app.initialize()
377 app.initialize()
378 app.start()
378 app.start()
379
379
380
380
381 if __name__ == '__main__':
381 if __name__ == '__main__':
382 main()
382 main()
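main() is nothing more than initialize() plus start(), so the application can also be driven programmatically, e.g. to hand it explicit command-line arguments. A sketch (assuming the module is importable as below; --existing connects to the most recently started kernel):

from IPython.qt.console.qtconsoleapp import IPythonQtConsoleApp

app = IPythonQtConsoleApp()
app.initialize(['--existing'])
app.start()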
@@ -1,55 +1,108 b''
1 """Simple utility for building a list of local IPs using the socket module.
1 """Simple utility for building a list of local IPs using the socket module.
2 This module defines three constants:
2 This module defines three constants:
3
3
4 LOCALHOST : The loopback interface, or the first interface that points to this
4 LOCALHOST : The loopback interface, or the first interface that points to this
5 machine. It will *almost* always be '127.0.0.1'
5 machine. It will *almost* always be '127.0.0.1'
6
6
7 LOCAL_IPS : A list of IP addresses, loopback first, that point to this machine.
7 LOCAL_IPS : A list of IP addresses, loopback first, that point to this machine.
8
8
9 PUBLIC_IPS : A list of public IP addresses that point to this machine.
9 PUBLIC_IPS : A list of public IP addresses that point to this machine.
10 Use these to tell remote clients where to find you.
10 Use these to tell remote clients where to find you.
11 """
11 """
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13 # Copyright (C) 2010-2011 The IPython Development Team
13 # Copyright (C) 2010-2011 The IPython Development Team
14 #
14 #
15 # Distributed under the terms of the BSD License. The full license is in
15 # Distributed under the terms of the BSD License. The full license is in
16 # the file COPYING, distributed as part of this software.
16 # the file COPYING, distributed as part of this software.
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18
18
19 #-----------------------------------------------------------------------------
19 #-----------------------------------------------------------------------------
20 # Imports
20 # Imports
21 #-----------------------------------------------------------------------------
21 #-----------------------------------------------------------------------------
22
22
23 import socket
23 import socket
24
24
25 from .data import uniq_stable
25 from .data import uniq_stable
26
26
27 #-----------------------------------------------------------------------------
27 #-----------------------------------------------------------------------------
28 # Code
28 # Code
29 #-----------------------------------------------------------------------------
29 #-----------------------------------------------------------------------------
30
30
31 LOCAL_IPS = []
31 LOCAL_IPS = []
32 PUBLIC_IPS = []
33
34 LOCALHOST = '127.0.0.1'
35
36 def _only_once(f):
37 """decorator to only run a function once"""
38 f.called = False
39 def wrapped():
40 if f.called:
41 return
42 ret = f()
43 f.called = True
44 return ret
45 return wrapped
46
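Note that _only_once latches only after f() returns: a call that raises leaves f.called False, so the load will be retried, and later successful calls return None rather than a cached result. Its behavior in miniature (announce is a hypothetical example function):

@_only_once
def announce():
    print('loading...')
    return 42

announce()   # prints 'loading...' and returns 42
announce()   # silent, returns None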
47 def _requires_ips(f):
48 """decorator to ensure load_ips has been run before f"""
49 def ips_loaded(*args, **kwargs):
50 _load_ips()
51 return f(*args, **kwargs)
52 return ips_loaded
53
54 @_only_once
55 def _load_ips():
56 """load the IPs that point to this machine
57
58 This function will only ever be called once.
59 """
60 global LOCALHOST
32 try:
61 try:
33 LOCAL_IPS = socket.gethostbyname_ex('localhost')[2]
62 LOCAL_IPS[:] = socket.gethostbyname_ex('localhost')[2]
34 except socket.error:
63 except socket.error:
35 pass
64 pass
36
65
37 PUBLIC_IPS = []
38 try:
66 try:
39 hostname = socket.gethostname()
67 hostname = socket.gethostname()
40 PUBLIC_IPS = socket.gethostbyname_ex(hostname)[2]
68 PUBLIC_IPS[:] = socket.gethostbyname_ex(hostname)[2]
41 # try hostname.local, in case hostname has been short-circuited to loopback
69 # try hostname.local, in case hostname has been short-circuited to loopback
42 if not hostname.endswith('.local') and all(ip.startswith('127') for ip in PUBLIC_IPS):
70 if not hostname.endswith('.local') and all(ip.startswith('127') for ip in PUBLIC_IPS):
43 PUBLIC_IPS = socket.gethostbyname_ex(socket.gethostname() + '.local')[2]
71 PUBLIC_IPS[:] = socket.gethostbyname_ex(socket.gethostname() + '.local')[2]
44 except socket.error:
72 except socket.error:
45 pass
73 pass
46 finally:
74 finally:
47 PUBLIC_IPS = uniq_stable(PUBLIC_IPS)
75 PUBLIC_IPS[:] = uniq_stable(PUBLIC_IPS)
48 LOCAL_IPS.extend(PUBLIC_IPS)
76 LOCAL_IPS.extend(PUBLIC_IPS)
49
77
50 # include all-interface aliases: 0.0.0.0 and ''
78 # include all-interface aliases: 0.0.0.0 and ''
51 LOCAL_IPS.extend(['0.0.0.0', ''])
79 LOCAL_IPS.extend(['0.0.0.0', ''])
52
80
53 LOCAL_IPS = uniq_stable(LOCAL_IPS)
81 LOCAL_IPS[:] = uniq_stable(LOCAL_IPS)
54
82
55 LOCALHOST = LOCAL_IPS[0]
83 LOCALHOST = LOCAL_IPS[0]
84
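The switch from plain assignment to slice assignment (LOCAL_IPS[:] = ...) is what makes the laziness safe: _load_ips mutates the module-level lists in place, so code that imported LOCAL_IPS or PUBLIC_IPS before the first lookup still sees the populated lists. Strings cannot be mutated in place, which is why LOCALHOST is instead rebound under a global declaration and exposed through the localhost() accessor below. The aliasing difference in a few lines:

a = []
b = a            # an importer's reference to the same list
a = [1, 2]       # rebinding: b is still []
a = b
a[:] = [1, 2]    # in place: b is now [1, 2] as well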
85 @_requires_ips
86 def local_ips():
87 """return the IP addresses that point to this machine"""
88 return LOCAL_IPS
89
90 @_requires_ips
91 def public_ips():
92 """return the IP addresses for this machine that are visible to other machines"""
93 return PUBLIC_IPS
94
95 @_requires_ips
96 def localhost():
97 """return ip for localhost (almost always 127.0.0.1)"""
98 return LOCALHOST
99
100 @_requires_ips
101 def is_local_ip(ip):
102 """does `ip` point to this machine?"""
103 return ip in LOCAL_IPS
104
105 @_requires_ips
106 def is_public_ip(ip):
107 """is `ip` a publicly visible address?"""
108 return ip in PUBLIC_IPS
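With these accessors, the first call pays for the one-time socket lookups in _load_ips and every later call is a plain list return or membership test. Typical use (a sketch; the module path follows the commit message):

from IPython.utils.localinterfaces import localhost, local_ips, is_local_ip

print(localhost())               # almost always '127.0.0.1'
print(local_ips())               # loopback first, plus '0.0.0.0' and ''
print(is_local_ip('0.0.0.0'))    # True: the all-interfaces alias is included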