@@ -1,391 +1,390 @@
""" A minimal application base mixin for all ZMQ based IPython frontends.

This is not a complete console app, as subprocesses will not be able to receive
input, there is no real readline support, among other limitations. This is a
refactoring of what used to be the IPython/qt/console/qtconsoleapp.py

Authors:

* Evan Patterson
* Min RK
* Erik Tollerud
* Fernando Perez
* Bussonnier Matthias
* Thomas Kluyver
* Paul Ivanov

"""

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# stdlib imports
import atexit
import json
import os
import shutil
import signal
import sys
import uuid


# Local imports
from IPython.config.application import boolean_flag
from IPython.config.configurable import Configurable
from IPython.core.profiledir import ProfileDir
from IPython.kernel.blocking import BlockingKernelClient
from IPython.kernel import KernelManager
from IPython.kernel import tunnel_to_kernel, find_connection_file, swallow_argv
from IPython.utils.path import filefind
from IPython.utils.py3compat import str_to_bytes
from IPython.utils.traitlets import (
    Dict, List, Unicode, CUnicode, Int, CBool, Any, CaselessStrEnum
)
from IPython.kernel.zmq.kernelapp import (
    kernel_flags,
    kernel_aliases,
    IPKernelApp
)
from IPython.kernel.zmq.session import Session, default_secure
from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell

#-----------------------------------------------------------------------------
# Network Constants
#-----------------------------------------------------------------------------

from IPython.utils.localinterfaces import LOCALHOST, LOCAL_IPS

#-----------------------------------------------------------------------------
# Globals
#-----------------------------------------------------------------------------


#-----------------------------------------------------------------------------
# Aliases and Flags
#-----------------------------------------------------------------------------

flags = dict(kernel_flags)

# the flags that are specific to the frontend
# these must be scrubbed before being passed to the kernel,
# or it will raise an error on unrecognized flags
app_flags = {
    'existing' : ({'IPythonConsoleApp' : {'existing' : 'kernel*.json'}},
            "Connect to an existing kernel. If no argument specified, guess most recent"),
}
app_flags.update(boolean_flag(
    'confirm-exit', 'IPythonConsoleApp.confirm_exit',
    """Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
    to force a direct exit without any confirmation.
    """,
    """Don't prompt the user when exiting. This will terminate the kernel
    if it is owned by the frontend, and leave it alive if it is external.
    """
))
flags.update(app_flags)

aliases = dict(kernel_aliases)

# also scrub aliases from the frontend
app_aliases = dict(
    ip = 'KernelManager.ip',
    transport = 'KernelManager.transport',
    hb = 'IPythonConsoleApp.hb_port',
    shell = 'IPythonConsoleApp.shell_port',
    iopub = 'IPythonConsoleApp.iopub_port',
    stdin = 'IPythonConsoleApp.stdin_port',
    existing = 'IPythonConsoleApp.existing',
    f = 'IPythonConsoleApp.connection_file',


    ssh = 'IPythonConsoleApp.sshserver',
)
aliases.update(app_aliases)

#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# IPythonConsole
#-----------------------------------------------------------------------------

classes = [IPKernelApp, ZMQInteractiveShell, KernelManager, ProfileDir, Session]

try:
    from IPython.kernel.zmq.pylab.backend_inline import InlineBackend
except ImportError:
    pass
else:
    classes.append(InlineBackend)

class IPythonConsoleApp(Configurable):
    name = 'ipython-console-mixin'
-    default_config_file_name='ipython_config.py'

    description = """
        The IPython Mixin Console.

        This class contains the common portions of console clients (QtConsole,
        ZMQ-based terminal console, etc).  It is not a full console, in that
        launched terminal subprocesses will not be able to accept input.

        The Console using this mixin supports various extra features beyond
        the single-process Terminal IPython shell, such as connecting to an
        existing kernel, via:

            ipython <appname> --existing

        as well as tunneling via SSH

    """

    classes = classes
    flags = Dict(flags)
    aliases = Dict(aliases)
    kernel_manager_class = KernelManager
    kernel_client_class = BlockingKernelClient

    kernel_argv = List(Unicode)
    # frontend flags & aliases to be stripped when building kernel_argv
    frontend_flags = Any(app_flags)
    frontend_aliases = Any(app_aliases)

    # create requested profiles by default, if they don't exist:
    auto_create = CBool(True)
    # connection info:

    sshserver = Unicode('', config=True,
        help="""The SSH server to use to connect to the kernel.""")
    sshkey = Unicode('', config=True,
        help="""Path to the ssh key to use for logging in to the ssh server.""")

    hb_port = Int(0, config=True,
        help="set the heartbeat port [default: random]")
    shell_port = Int(0, config=True,
        help="set the shell (ROUTER) port [default: random]")
    iopub_port = Int(0, config=True,
        help="set the iopub (PUB) port [default: random]")
    stdin_port = Int(0, config=True,
        help="set the stdin (DEALER) port [default: random]")
    connection_file = Unicode('', config=True,
        help="""JSON file in which to store connection info [default: kernel-<pid>.json]

        This file will contain the IP, ports, and authentication key needed to connect
        clients to this kernel. By default, this file will be created in the security-dir
        of the current profile, but can be specified by absolute path.
        """)
    def _connection_file_default(self):
        return 'kernel-%i.json' % os.getpid()

    existing = CUnicode('', config=True,
        help="""Connect to an already running kernel""")

    confirm_exit = CBool(True, config=True,
        help="""
        Set to display confirmation dialog on exit. You can always use 'exit' or 'quit',
        to force a direct exit without any confirmation.""",
    )


    def build_kernel_argv(self, argv=None):
        """build argv to be passed to kernel subprocess"""
        if argv is None:
            argv = sys.argv[1:]
        self.kernel_argv = swallow_argv(argv, self.frontend_aliases, self.frontend_flags)
        # kernel should inherit default config file from frontend
        self.kernel_argv.append("--IPKernelApp.parent_appname='%s'" % self.name)

    def init_connection_file(self):
        """find the connection file, and load the info if found.

        The current working directory and the current profile's security
        directory will be searched for the file if it is not given by
        absolute path.

        When attempting to connect to an existing kernel and the `--existing`
        argument does not match an existing file, it will be interpreted as a
        fileglob, and the matching file in the current profile's security dir
        with the latest access time will be used.

        After this method is called, self.connection_file contains the *full path*
        to the connection file, never just its name.
        """
        if self.existing:
            try:
                cf = find_connection_file(self.existing)
            except Exception:
                self.log.critical("Could not find existing kernel connection file %s", self.existing)
                self.exit(1)
            self.log.info("Connecting to existing kernel: %s" % cf)
            self.connection_file = cf
        else:
            # not existing, check if we are going to write the file
            # and ensure that self.connection_file is a full path, not just the shortname
            try:
                cf = find_connection_file(self.connection_file)
            except Exception:
                # file might not exist
                if self.connection_file == os.path.basename(self.connection_file):
                    # just shortname, put it in security dir
                    cf = os.path.join(self.profile_dir.security_dir, self.connection_file)
                else:
                    cf = self.connection_file
                self.connection_file = cf

        # should load_connection_file only be used for existing?
        # as it is now, this allows reusing ports if an existing
        # file is requested
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
            self.exit(1)

    def load_connection_file(self):
        """load ip/port/hmac config from JSON connection file"""
        # this is identical to IPKernelApp.load_connection_file
        # perhaps it can be centralized somewhere?
        try:
            fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir])
        except IOError:
            self.log.debug("Connection File not found: %s", self.connection_file)
            return
        self.log.debug(u"Loading connection file %s", fname)
        with open(fname) as f:
            cfg = json.load(f)

        self.config.KernelManager.transport = cfg.get('transport', 'tcp')
        self.config.KernelManager.ip = cfg.get('ip', LOCALHOST)

        for channel in ('hb', 'shell', 'iopub', 'stdin'):
            name = channel + '_port'
            if getattr(self, name) == 0 and name in cfg:
                # not overridden by config or cl_args
                setattr(self, name, cfg[name])
        if 'key' in cfg:
            self.config.Session.key = str_to_bytes(cfg['key'])
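    # For illustration (not a line of the original file): the connection file
    # read above is a small JSON document whose keys match exactly what
    # load_connection_file() consumes; the port numbers and key here are
    # made-up placeholder values.
    #
    #     {
    #       "transport": "tcp",
    #       "ip": "127.0.0.1",
    #       "shell_port": 54321,
    #       "iopub_port": 54322,
    #       "stdin_port": 54323,
    #       "hb_port": 54324,
    #       "key": "00000000-aaaa-bbbb-cccc-dddddddddddd"
    #     }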

    def init_ssh(self):
        """set up ssh tunnels, if needed."""
        if not self.existing or (not self.sshserver and not self.sshkey):
            return

        self.load_connection_file()

        transport = self.config.KernelManager.transport
        ip = self.config.KernelManager.ip

        if transport != 'tcp':
            self.log.error("Can only use ssh tunnels with TCP sockets, not %s", transport)
            sys.exit(-1)

        if self.sshkey and not self.sshserver:
            # specifying just the key implies that we are connecting directly
            self.sshserver = ip
            ip = LOCALHOST

        # build connection dict for tunnels:
        info = dict(ip=ip,
                    shell_port=self.shell_port,
                    iopub_port=self.iopub_port,
                    stdin_port=self.stdin_port,
                    hb_port=self.hb_port
        )

        self.log.info("Forwarding connections to %s via %s" % (ip, self.sshserver))

        # tunnels return a new set of ports, which will be on localhost:
        self.config.KernelManager.ip = LOCALHOST
        try:
            newports = tunnel_to_kernel(info, self.sshserver, self.sshkey)
        except:
            # even catch KeyboardInterrupt
            self.log.error("Could not setup tunnels", exc_info=True)
            self.exit(1)

        self.shell_port, self.iopub_port, self.stdin_port, self.hb_port = newports

        cf = self.connection_file
        base, ext = os.path.splitext(cf)
        base = os.path.basename(base)
        self.connection_file = os.path.basename(base) + '-ssh' + ext
        self.log.critical("To connect another client via this tunnel, use:")
        self.log.critical("--existing %s" % self.connection_file)

    def _new_connection_file(self):
        cf = ''
        while not cf:
            # we don't need a 128b id to distinguish kernels, use more readable
            # 48b node segment (12 hex chars).  Users running more than 32k simultaneous
            # kernels can subclass.
            ident = str(uuid.uuid4()).split('-')[-1]
            cf = os.path.join(self.profile_dir.security_dir, 'kernel-%s.json' % ident)
            # only keep if it's actually new.  Protect against unlikely collision
            # in 48b random search space
            cf = cf if not os.path.exists(cf) else ''
        return cf

    def init_kernel_manager(self):
        # Don't let Qt or ZMQ swallow KeyboardInterrupts.
        if self.existing:
            self.kernel_manager = None
            return
        signal.signal(signal.SIGINT, signal.SIG_DFL)

        # Create a KernelManager and start a kernel.
        self.kernel_manager = self.kernel_manager_class(
                                shell_port=self.shell_port,
                                iopub_port=self.iopub_port,
                                stdin_port=self.stdin_port,
                                hb_port=self.hb_port,
                                connection_file=self.connection_file,
                                parent=self,
        )
        self.kernel_manager.client_factory = self.kernel_client_class
        self.kernel_manager.start_kernel(extra_arguments=self.kernel_argv)
        atexit.register(self.kernel_manager.cleanup_ipc_files)

        if self.sshserver:
            # ssh, write new connection file
            self.kernel_manager.write_connection_file()

        # in case KM defaults / ssh writing changes things:
        km = self.kernel_manager
        self.shell_port = km.shell_port
        self.iopub_port = km.iopub_port
        self.stdin_port = km.stdin_port
        self.hb_port = km.hb_port
        self.connection_file = km.connection_file

        atexit.register(self.kernel_manager.cleanup_connection_file)

    def init_kernel_client(self):
        if self.kernel_manager is not None:
            self.kernel_client = self.kernel_manager.client()
        else:
            self.kernel_client = self.kernel_client_class(
                                shell_port=self.shell_port,
                                iopub_port=self.iopub_port,
                                stdin_port=self.stdin_port,
                                hb_port=self.hb_port,
                                connection_file=self.connection_file,
                                parent=self,
            )

        self.kernel_client.start_channels()



    def initialize(self, argv=None):
        """
        Classes which mix this class in should call:
            IPythonConsoleApp.initialize(self,argv)
        """
        self.init_connection_file()
        default_secure(self.config)
        self.init_ssh()
        self.init_kernel_manager()
        self.init_kernel_client()
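Before the notebook server module below, a minimal sketch of how a concrete frontend is expected to consume this mixin. This is not part of the diff: the class name MyConsoleApp is hypothetical, but the double-initialize pattern, the reuse of IPythonConsoleApp.classes, and the build_kernel_argv() hook follow what the mixin's own initialize() docstring and frontend-flag scrubbing ask for.

    from IPython.core.application import BaseIPythonApplication
    from IPython.consoleapp import IPythonConsoleApp

    class MyConsoleApp(BaseIPythonApplication, IPythonConsoleApp):
        """Hypothetical frontend built on the console-app mixin."""
        name = 'my-console'
        classes = IPythonConsoleApp.classes

        def parse_command_line(self, argv=None):
            super(MyConsoleApp, self).parse_command_line(argv)
            # strip frontend-only flags/aliases before they reach the kernel subprocess
            self.build_kernel_argv(argv)

        def initialize(self, argv=None):
            # profile and config-file handling from BaseIPythonApplication first ...
            super(MyConsoleApp, self).initialize(argv)
            # ... then connection file, ssh tunnels, kernel manager and kernel client
            IPythonConsoleApp.initialize(self, argv)

    # typical startup:
    app = MyConsoleApp.instance()
    app.initialize()

The frontend class keeps responsibility for its own UI/event loop; the mixin only supplies kernel connection and management.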
@@ -1,743 +1,742 @@
# coding: utf-8
"""A tornado based IPython notebook server.

Authors:

* Brian Granger
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

# stdlib
import errno
import logging
import os
import random
import select
import signal
import socket
import sys
import threading
import time
import webbrowser


# Third party
# check for pyzmq 2.1.11
from IPython.utils.zmqrelated import check_for_zmq
check_for_zmq('2.1.11', 'IPython.html')

from jinja2 import Environment, FileSystemLoader

# Install the pyzmq ioloop. This has to be done before anything else from
# tornado is imported.
from zmq.eventloop import ioloop
ioloop.install()

# check for tornado 2.1.0
msg = "The IPython Notebook requires tornado >= 2.1.0"
try:
    import tornado
except ImportError:
    raise ImportError(msg)
try:
    version_info = tornado.version_info
except AttributeError:
    raise ImportError(msg + ", but you have < 1.1.0")
if version_info < (2,1,0):
    raise ImportError(msg + ", but you have %s" % tornado.version)

from tornado import httpserver
from tornado import web

# Our own libraries
from IPython.html import DEFAULT_STATIC_FILES_PATH

from .services.kernels.kernelmanager import MappingKernelManager
from .services.notebooks.nbmanager import NotebookManager
from .services.notebooks.filenbmanager import FileNotebookManager
from .services.clusters.clustermanager import ClusterManager

from .base.handlers import AuthenticatedFileHandler, FileFindHandler

from IPython.config.application import catch_config_error, boolean_flag
from IPython.core.application import BaseIPythonApplication
from IPython.consoleapp import IPythonConsoleApp
from IPython.kernel import swallow_argv
from IPython.kernel.zmq.session import default_secure
from IPython.kernel.zmq.kernelapp import (
    kernel_flags,
    kernel_aliases,
)
from IPython.utils.importstring import import_item
from IPython.utils.localinterfaces import LOCALHOST
from IPython.utils import submodule
from IPython.utils.traitlets import (
    Dict, Unicode, Integer, List, Bool, Bytes,
    DottedObjectName
)
from IPython.utils import py3compat
from IPython.utils.path import filefind

from .utils import url_path_join

#-----------------------------------------------------------------------------
# Module globals
#-----------------------------------------------------------------------------

_examples = """
ipython notebook                       # start the notebook
ipython notebook --profile=sympy       # use the sympy profile
ipython notebook --pylab=inline        # pylab in inline plotting mode
ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
ipython notebook --port=5555 --ip=*    # Listen on port 5555, all interfaces
"""

#-----------------------------------------------------------------------------
# Helper functions
#-----------------------------------------------------------------------------

def random_ports(port, n):
    """Generate a list of n random ports near the given port.

    The first 5 ports will be sequential, and the remaining n-5 will be
    randomly selected in the range [port-2*n, port+2*n].
    """
    for i in range(min(5, n)):
        yield port + i
    for i in range(n-5):
        yield port + random.randint(-2*n, 2*n)
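# For example (illustrative, not a line of the original file):
# random_ports(8888, 50) first yields the sequential ports 8888-8892, then 45
# more drawn uniformly from [8788, 8988] (port - 2*n to port + 2*n); duplicate
# candidates are possible, each is simply another port to try.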

def load_handlers(name):
    """Load the (URL pattern, handler) tuples for each component."""
    name = 'IPython.html.' + name
    mod = __import__(name, fromlist=['default_handlers'])
    return mod.default_handlers

#-----------------------------------------------------------------------------
# The Tornado web application
#-----------------------------------------------------------------------------

class NotebookWebApplication(web.Application):

    def __init__(self, ipython_app, kernel_manager, notebook_manager,
                 cluster_manager, log,
                 base_project_url, settings_overrides):

        settings = self.init_settings(
            ipython_app, kernel_manager, notebook_manager, cluster_manager,
            log, base_project_url, settings_overrides)
        handlers = self.init_handlers(settings)

        super(NotebookWebApplication, self).__init__(handlers, **settings)

    def init_settings(self, ipython_app, kernel_manager, notebook_manager,
                      cluster_manager, log,
                      base_project_url, settings_overrides):
        # Python < 2.6.5 doesn't accept unicode keys in f(**kwargs), and
        # base_project_url will always be unicode, which will in turn
        # make the patterns unicode, and ultimately result in unicode
        # keys in kwargs to handler._execute(**kwargs) in tornado.
        # This enforces that base_project_url be ascii in that situation.
        #
        # Note that the URLs these patterns check against are escaped,
        # and thus guaranteed to be ASCII: 'héllo' is really 'h%C3%A9llo'.
        base_project_url = py3compat.unicode_to_str(base_project_url, 'ascii')
        template_path = os.path.join(os.path.dirname(__file__), "templates")
        settings = dict(
            # basics
            base_project_url=base_project_url,
            base_kernel_url=ipython_app.base_kernel_url,
            template_path=template_path,
            static_path=ipython_app.static_file_path,
            static_handler_class = FileFindHandler,
            static_url_prefix = url_path_join(base_project_url,'/static/'),

            # authentication
            cookie_secret=ipython_app.cookie_secret,
            login_url=url_path_join(base_project_url,'/login'),
            read_only=ipython_app.read_only,
            password=ipython_app.password,

            # managers
            kernel_manager=kernel_manager,
            notebook_manager=notebook_manager,
            cluster_manager=cluster_manager,

            # IPython stuff
            mathjax_url=ipython_app.mathjax_url,
            config=ipython_app.config,
            use_less=ipython_app.use_less,
            jinja2_env=Environment(loader=FileSystemLoader(template_path)),
        )

        # allow custom overrides for the tornado web app.
        settings.update(settings_overrides)
        return settings

    def init_handlers(self, settings):
        # Load the (URL pattern, handler) tuples for each component.
        handlers = []
        handlers.extend(load_handlers('base.handlers'))
        handlers.extend(load_handlers('tree.handlers'))
        handlers.extend(load_handlers('auth.login'))
        handlers.extend(load_handlers('auth.logout'))
        handlers.extend(load_handlers('notebook.handlers'))
        handlers.extend(load_handlers('services.kernels.handlers'))
        handlers.extend(load_handlers('services.notebooks.handlers'))
        handlers.extend(load_handlers('services.clusters.handlers'))
        handlers.extend([
            (r"/files/(.*)", AuthenticatedFileHandler, {'path' : settings['notebook_manager'].notebook_dir}),
        ])
        # prepend base_project_url onto the patterns that we match
        new_handlers = []
        for handler in handlers:
            pattern = url_path_join(settings['base_project_url'], handler[0])
            new_handler = tuple([pattern] + list(handler[1:]))
            new_handlers.append(new_handler)
        return new_handlers

209 |
|
209 | |||
210 | #----------------------------------------------------------------------------- |
|
210 | #----------------------------------------------------------------------------- | |
211 | # Aliases and Flags |
|
211 | # Aliases and Flags | |
212 | #----------------------------------------------------------------------------- |
|
212 | #----------------------------------------------------------------------------- | |
213 |
|
213 | |||
214 | flags = dict(kernel_flags) |
|
214 | flags = dict(kernel_flags) | |
215 | flags['no-browser']=( |
|
215 | flags['no-browser']=( | |
216 | {'NotebookApp' : {'open_browser' : False}}, |
|
216 | {'NotebookApp' : {'open_browser' : False}}, | |
217 | "Don't open the notebook in a browser after startup." |
|
217 | "Don't open the notebook in a browser after startup." | |
218 | ) |
|
218 | ) | |
219 | flags['no-mathjax']=( |
|
219 | flags['no-mathjax']=( | |
220 | {'NotebookApp' : {'enable_mathjax' : False}}, |
|
220 | {'NotebookApp' : {'enable_mathjax' : False}}, | |
221 | """Disable MathJax |
|
221 | """Disable MathJax | |
222 |
|
222 | |||
223 | MathJax is the javascript library IPython uses to render math/LaTeX. It is |
|
223 | MathJax is the javascript library IPython uses to render math/LaTeX. It is | |
224 | very large, so you may want to disable it if you have a slow internet |
|
224 | very large, so you may want to disable it if you have a slow internet | |
225 | connection, or for offline use of the notebook. |
|
225 | connection, or for offline use of the notebook. | |
226 |
|
226 | |||
227 | When disabled, equations etc. will appear as their untransformed TeX source. |
|
227 | When disabled, equations etc. will appear as their untransformed TeX source. | |
228 | """ |
|
228 | """ | |
229 | ) |
|
229 | ) | |
230 | flags['read-only'] = ( |
|
230 | flags['read-only'] = ( | |
231 | {'NotebookApp' : {'read_only' : True}}, |
|
231 | {'NotebookApp' : {'read_only' : True}}, | |
232 | """Allow read-only access to notebooks. |
|
232 | """Allow read-only access to notebooks. | |
233 |
|
233 | |||
234 | When using a password to protect the notebook server, this flag |
|
234 | When using a password to protect the notebook server, this flag | |
235 | allows unauthenticated clients to view the notebook list, and |
|
235 | allows unauthenticated clients to view the notebook list, and | |
236 | individual notebooks, but not edit them, start kernels, or run |
|
236 | individual notebooks, but not edit them, start kernels, or run | |
237 | code. |
|
237 | code. | |
238 |
|
238 | |||
239 | If no password is set, the server will be entirely read-only. |
|
239 | If no password is set, the server will be entirely read-only. | |
240 | """ |
|
240 | """ | |
241 | ) |
|
241 | ) | |
242 |
|
242 | |||
243 | # Add notebook manager flags |
|
243 | # Add notebook manager flags | |
244 | flags.update(boolean_flag('script', 'FileNotebookManager.save_script', |
|
244 | flags.update(boolean_flag('script', 'FileNotebookManager.save_script', | |
245 | 'Auto-save a .py script everytime the .ipynb notebook is saved', |
|
245 | 'Auto-save a .py script everytime the .ipynb notebook is saved', | |
246 | 'Do not auto-save .py scripts for every notebook')) |
|
246 | 'Do not auto-save .py scripts for every notebook')) | |
247 |
|
247 | |||
248 | # the flags that are specific to the frontend |
|
248 | # the flags that are specific to the frontend | |
249 | # these must be scrubbed before being passed to the kernel, |
|
249 | # these must be scrubbed before being passed to the kernel, | |
250 | # or it will raise an error on unrecognized flags |
|
250 | # or it will raise an error on unrecognized flags | |
251 | notebook_flags = ['no-browser', 'no-mathjax', 'read-only', 'script', 'no-script'] |
|
251 | notebook_flags = ['no-browser', 'no-mathjax', 'read-only', 'script', 'no-script'] | |
252 |
|
252 | |||
253 | aliases = dict(kernel_aliases) |
|
253 | aliases = dict(kernel_aliases) | |
254 |
|
254 | |||
255 | aliases.update({ |
|
255 | aliases.update({ | |
256 | 'ip': 'NotebookApp.ip', |
|
256 | 'ip': 'NotebookApp.ip', | |
257 | 'port': 'NotebookApp.port', |
|
257 | 'port': 'NotebookApp.port', | |
258 | 'port-retries': 'NotebookApp.port_retries', |
|
258 | 'port-retries': 'NotebookApp.port_retries', | |
259 | 'transport': 'KernelManager.transport', |
|
259 | 'transport': 'KernelManager.transport', | |
260 | 'keyfile': 'NotebookApp.keyfile', |
|
260 | 'keyfile': 'NotebookApp.keyfile', | |
261 | 'certfile': 'NotebookApp.certfile', |
|
261 | 'certfile': 'NotebookApp.certfile', | |
262 | 'notebook-dir': 'NotebookManager.notebook_dir', |
|
262 | 'notebook-dir': 'NotebookManager.notebook_dir', | |
263 | 'browser': 'NotebookApp.browser', |
|
263 | 'browser': 'NotebookApp.browser', | |
264 | }) |
|
264 | }) | |
265 |
|
265 | |||
266 | # remove ipkernel flags that are singletons, and don't make sense in |
|
266 | # remove ipkernel flags that are singletons, and don't make sense in | |
267 | # multi-kernel evironment: |
|
267 | # multi-kernel evironment: | |
268 | aliases.pop('f', None) |
|
268 | aliases.pop('f', None) | |
269 |
|
269 | |||
270 | notebook_aliases = [u'port', u'port-retries', u'ip', u'keyfile', u'certfile', |
|
270 | notebook_aliases = [u'port', u'port-retries', u'ip', u'keyfile', u'certfile', | |
271 | u'notebook-dir'] |
|
271 | u'notebook-dir'] | |
272 |
|
272 | |||
273 | #----------------------------------------------------------------------------- |
|
273 | #----------------------------------------------------------------------------- | |
274 | # NotebookApp |
|
274 | # NotebookApp | |
275 | #----------------------------------------------------------------------------- |
|
275 | #----------------------------------------------------------------------------- | |
276 |
|
276 | |||
277 | class NotebookApp(BaseIPythonApplication): |
|
277 | class NotebookApp(BaseIPythonApplication): | |
278 |
|
278 | |||
279 | name = 'ipython-notebook' |
|
279 | name = 'ipython-notebook' | |
280 | default_config_file_name='ipython_notebook_config.py' |
|
|||
281 |
|
280 | |||
282 | description = """ |
|
281 | description = """ | |
283 | The IPython HTML Notebook. |
|
282 | The IPython HTML Notebook. | |
284 |
|
283 | |||
285 | This launches a Tornado based HTML Notebook Server that serves up an |
|
284 | This launches a Tornado based HTML Notebook Server that serves up an | |
286 | HTML5/Javascript Notebook client. |
|
285 | HTML5/Javascript Notebook client. | |
287 | """ |
|
286 | """ | |
288 | examples = _examples |
|
287 | examples = _examples | |
289 |
|
288 | |||
290 | classes = IPythonConsoleApp.classes + [MappingKernelManager, NotebookManager, |
|
289 | classes = IPythonConsoleApp.classes + [MappingKernelManager, NotebookManager, | |
291 | FileNotebookManager] |
|
290 | FileNotebookManager] | |
292 | flags = Dict(flags) |
|
291 | flags = Dict(flags) | |
293 | aliases = Dict(aliases) |
|
292 | aliases = Dict(aliases) | |
294 |
|
293 | |||
295 | kernel_argv = List(Unicode) |
|
294 | kernel_argv = List(Unicode) | |
296 |
|
295 | |||
297 | def _log_level_default(self): |
|
296 | def _log_level_default(self): | |
298 | return logging.INFO |
|
297 | return logging.INFO | |
299 |
|
298 | |||
300 | def _log_format_default(self): |
|
299 | def _log_format_default(self): | |
301 | """override default log format to include time""" |
|
300 | """override default log format to include time""" | |
302 | return u"%(asctime)s.%(msecs).03d [%(name)s]%(highlevel)s %(message)s" |
|
301 | return u"%(asctime)s.%(msecs).03d [%(name)s]%(highlevel)s %(message)s" | |
303 |
|
302 | |||
304 | # create requested profiles by default, if they don't exist: |
|
303 | # create requested profiles by default, if they don't exist: | |
305 | auto_create = Bool(True) |
|
304 | auto_create = Bool(True) | |
306 |
|
305 | |||
307 | # file to be opened in the notebook server |
|
306 | # file to be opened in the notebook server | |
308 | file_to_run = Unicode('') |
|
307 | file_to_run = Unicode('') | |
309 |
|
308 | |||
310 | # Network related information. |
|
309 | # Network related information. | |
311 |
|
310 | |||
312 | ip = Unicode(LOCALHOST, config=True, |
|
311 | ip = Unicode(LOCALHOST, config=True, | |
313 | help="The IP address the notebook server will listen on." |
|
312 | help="The IP address the notebook server will listen on." | |
314 | ) |
|
313 | ) | |
315 |
|
314 | |||
316 | def _ip_changed(self, name, old, new): |
|
315 | def _ip_changed(self, name, old, new): | |
317 | if new == u'*': self.ip = u'' |
|
316 | if new == u'*': self.ip = u'' | |
318 |
|
317 | |||
319 | port = Integer(8888, config=True, |
|
318 | port = Integer(8888, config=True, | |
320 | help="The port the notebook server will listen on." |
|
319 | help="The port the notebook server will listen on." | |
321 | ) |
|
320 | ) | |
322 | port_retries = Integer(50, config=True, |
|
321 | port_retries = Integer(50, config=True, | |
323 | help="The number of additional ports to try if the specified port is not available." |
|
322 | help="The number of additional ports to try if the specified port is not available." | |
324 | ) |
|
323 | ) | |
325 |
|
324 | |||
326 | certfile = Unicode(u'', config=True, |
|
325 | certfile = Unicode(u'', config=True, | |
327 | help="""The full path to an SSL/TLS certificate file.""" |
|
326 | help="""The full path to an SSL/TLS certificate file.""" | |
328 | ) |
|
327 | ) | |
329 |
|
328 | |||
330 | keyfile = Unicode(u'', config=True, |
|
329 | keyfile = Unicode(u'', config=True, | |
331 | help="""The full path to a private key file for usage with SSL/TLS.""" |
|
330 | help="""The full path to a private key file for usage with SSL/TLS.""" | |
332 | ) |
|
331 | ) | |
333 |
|
332 | |||
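Taken together, the network traits above are normally set in the profile's notebook config file (conventionally ipython_notebook_config.py). A minimal sketch, with illustrative paths and port:

    # ipython_notebook_config.py -- values below are examples, not defaults
    c = get_config()
    c.NotebookApp.ip = '*'        # '*' is coerced to '' (all interfaces) by _ip_changed above
    c.NotebookApp.port = 9999
    c.NotebookApp.certfile = u'/absolute/path/to/mycert.pem'
    c.NotebookApp.keyfile = u'/absolute/path/to/mycert.key'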
334 | cookie_secret = Bytes(b'', config=True, |
|
333 | cookie_secret = Bytes(b'', config=True, | |
335 | help="""The random bytes used to secure cookies. |
|
334 | help="""The random bytes used to secure cookies. | |
336 | By default this is a new random value every time you start the Notebook. |
|
335 | By default this is a new random value every time you start the Notebook. | |
337 | Set it to a value in a config file to enable logins to persist across server sessions. |
|
336 | Set it to a value in a config file to enable logins to persist across server sessions. | |
338 |
|
337 | |||
339 | Note: Cookie secrets should be kept private; do not share config files with |
|
338 | Note: Cookie secrets should be kept private; do not share config files with | |
340 | cookie_secret stored in plaintext (you can read the value from a file). |
|
339 | cookie_secret stored in plaintext (you can read the value from a file). | |
341 | """ |
|
340 | """ | |
342 | ) |
|
341 | ) | |
343 | def _cookie_secret_default(self): |
|
342 | def _cookie_secret_default(self): | |
344 | return os.urandom(1024) |
|
343 | return os.urandom(1024) | |
345 |
|
344 | |||
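As the cookie_secret help above suggests, the secret itself can be kept out of the config file and read from a separate file instead. A sketch, with an illustrative path:

    # ipython_notebook_config.py -- the secret file path is an example
    c = get_config()
    with open('/path/to/notebook_cookie_secret', 'rb') as f:
        c.NotebookApp.cookie_secret = f.read().strip()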
346 | password = Unicode(u'', config=True, |
|
345 | password = Unicode(u'', config=True, | |
347 | help="""Hashed password to use for web authentication. |
|
346 | help="""Hashed password to use for web authentication. | |
348 |
|
347 | |||
349 | To generate, type in a python/IPython shell: |
|
348 | To generate, type in a python/IPython shell: | |
350 |
|
349 | |||
351 | from IPython.lib import passwd; passwd() |
|
350 | from IPython.lib import passwd; passwd() | |
352 |
|
351 | |||
353 | The string should be of the form type:salt:hashed-password. |
|
352 | The string should be of the form type:salt:hashed-password. | |
354 | """ |
|
353 | """ | |
355 | ) |
|
354 | ) | |
356 |
|
355 | |||
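Concretely, the string returned by passwd() (of the form type:salt:hashed-password, sha1 being the usual type) is what goes into the config. A sketch with placeholder salt/hash values:

    # In an IPython session:
    #   In [1]: from IPython.lib import passwd
    #   In [2]: passwd()
    #   Out[2]: 'sha1:<salt>:<hashed-password>'
    #
    # ipython_notebook_config.py:
    c = get_config()
    c.NotebookApp.password = u'sha1:<salt>:<hashed-password>'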
357 | open_browser = Bool(True, config=True, |
|
356 | open_browser = Bool(True, config=True, | |
358 | help="""Whether to open in a browser after starting. |
|
357 | help="""Whether to open in a browser after starting. | |
359 | The specific browser used is platform dependent and |
|
358 | The specific browser used is platform dependent and | |
360 | determined by the python standard library `webbrowser` |
|
359 | determined by the python standard library `webbrowser` | |
361 | module, unless it is overridden using the --browser |
|
360 | module, unless it is overridden using the --browser | |
362 | (NotebookApp.browser) configuration option. |
|
361 | (NotebookApp.browser) configuration option. | |
363 | """) |
|
362 | """) | |
364 |
|
363 | |||
365 | browser = Unicode(u'', config=True, |
|
364 | browser = Unicode(u'', config=True, | |
366 | help="""Specify what command to use to invoke a web |
|
365 | help="""Specify what command to use to invoke a web | |
367 | browser when opening the notebook. If not specified, the |
|
366 | browser when opening the notebook. If not specified, the | |
368 | default browser will be determined by the `webbrowser` |
|
367 | default browser will be determined by the `webbrowser` | |
369 | standard library module, which allows setting of the |
|
368 | standard library module, which allows setting of the | |
370 | BROWSER environment variable to override it. |
|
369 | BROWSER environment variable to override it. | |
371 | """) |
|
370 | """) | |
372 |
|
371 | |||
373 | read_only = Bool(False, config=True, |
|
372 | read_only = Bool(False, config=True, | |
374 | help="Whether to prevent editing/execution of notebooks." |
|
373 | help="Whether to prevent editing/execution of notebooks." | |
375 | ) |
|
374 | ) | |
376 |
|
375 | |||
377 | use_less = Bool(False, config=True, |
|
376 | use_less = Bool(False, config=True, | |
378 | help="""Whether to use browser-side less-css parsing |
|
377 | help="""Whether to use browser-side less-css parsing | |
379 | instead of the compiled css version in templates that allow |
|
378 | instead of the compiled css version in templates that allow | |
380 | it. This is mainly convenient when working on the less |
|
379 | it. This is mainly convenient when working on the less | |
381 | file to avoid a build step, or if the user wants to override |
|
380 | file to avoid a build step, or if the user wants to override | |
382 | some of the less variables without having to recompile |
|
381 | some of the less variables without having to recompile | |
383 | everything. |
|
382 | everything. | |
384 |
|
383 | |||
385 | You will need to install the less.js component in the static directory |
|
384 | You will need to install the less.js component in the static directory | |
386 | either in the source tree or in your profile folder. |
|
385 | either in the source tree or in your profile folder. | |
387 | """) |
|
386 | """) | |
388 |
|
387 | |||
389 | webapp_settings = Dict(config=True, |
|
388 | webapp_settings = Dict(config=True, | |
390 | help="Supply overrides for the tornado.web.Application that the " |
|
389 | help="Supply overrides for the tornado.web.Application that the " | |
391 | "IPython notebook uses.") |
|
390 | "IPython notebook uses.") | |
392 |
|
391 | |||
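One concrete override (and the one _mathjax_url_default below already looks for) is tornado's static_url_prefix. A sketch, with an illustrative prefix:

    # ipython_notebook_config.py -- prefix is an example
    c = get_config()
    c.NotebookApp.webapp_settings = {'static_url_prefix': '/custom/static/'}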
393 | enable_mathjax = Bool(True, config=True, |
|
392 | enable_mathjax = Bool(True, config=True, | |
394 | help="""Whether to enable MathJax for typesetting math/TeX |
|
393 | help="""Whether to enable MathJax for typesetting math/TeX | |
395 |
|
394 | |||
396 | MathJax is the javascript library IPython uses to render math/LaTeX. It is |
|
395 | MathJax is the javascript library IPython uses to render math/LaTeX. It is | |
397 | very large, so you may want to disable it if you have a slow internet |
|
396 | very large, so you may want to disable it if you have a slow internet | |
398 | connection, or for offline use of the notebook. |
|
397 | connection, or for offline use of the notebook. | |
399 |
|
398 | |||
400 | When disabled, equations etc. will appear as their untransformed TeX source. |
|
399 | When disabled, equations etc. will appear as their untransformed TeX source. | |
401 | """ |
|
400 | """ | |
402 | ) |
|
401 | ) | |
403 | def _enable_mathjax_changed(self, name, old, new): |
|
402 | def _enable_mathjax_changed(self, name, old, new): | |
404 | """set mathjax url to empty if mathjax is disabled""" |
|
403 | """set mathjax url to empty if mathjax is disabled""" | |
405 | if not new: |
|
404 | if not new: | |
406 | self.mathjax_url = u'' |
|
405 | self.mathjax_url = u'' | |
407 |
|
406 | |||
408 | base_project_url = Unicode('/', config=True, |
|
407 | base_project_url = Unicode('/', config=True, | |
409 | help='''The base URL for the notebook server. |
|
408 | help='''The base URL for the notebook server. | |
410 |
|
409 | |||
411 | Leading and trailing slashes can be omitted, |
|
410 | Leading and trailing slashes can be omitted, | |
412 | and will automatically be added. |
|
411 | and will automatically be added. | |
413 | ''') |
|
412 | ''') | |
414 | def _base_project_url_changed(self, name, old, new): |
|
413 | def _base_project_url_changed(self, name, old, new): | |
415 | if not new.startswith('/'): |
|
414 | if not new.startswith('/'): | |
416 | self.base_project_url = '/'+new |
|
415 | self.base_project_url = '/'+new | |
417 | elif not new.endswith('/'): |
|
416 | elif not new.endswith('/'): | |
418 | self.base_project_url = new+'/' |
|
417 | self.base_project_url = new+'/' | |
419 |
|
418 | |||
420 | base_kernel_url = Unicode('/', config=True, |
|
419 | base_kernel_url = Unicode('/', config=True, | |
421 | help='''The base URL for the kernel server |
|
420 | help='''The base URL for the kernel server | |
422 |
|
421 | |||
423 | Leading and trailing slashes can be omitted, |
|
422 | Leading and trailing slashes can be omitted, | |
424 | and will automatically be added. |
|
423 | and will automatically be added. | |
425 | ''') |
|
424 | ''') | |
426 | def _base_kernel_url_changed(self, name, old, new): |
|
425 | def _base_kernel_url_changed(self, name, old, new): | |
427 | if not new.startswith('/'): |
|
426 | if not new.startswith('/'): | |
428 | self.base_kernel_url = '/'+new |
|
427 | self.base_kernel_url = '/'+new | |
429 | elif not new.endswith('/'): |
|
428 | elif not new.endswith('/'): | |
430 | self.base_kernel_url = new+'/' |
|
429 | self.base_kernel_url = new+'/' | |
431 |
|
430 | |||
432 | websocket_url = Unicode("", config=True, |
|
431 | websocket_url = Unicode("", config=True, | |
433 | help="""The base URL for the websocket server, |
|
432 | help="""The base URL for the websocket server, | |
434 | if it differs from the HTTP server (hint: it almost certainly doesn't). |
|
433 | if it differs from the HTTP server (hint: it almost certainly doesn't). | |
435 |
|
434 | |||
436 | Should be in the form of an HTTP origin: ws[s]://hostname[:port] |
|
435 | Should be in the form of an HTTP origin: ws[s]://hostname[:port] | |
437 | """ |
|
436 | """ | |
438 | ) |
|
437 | ) | |
439 |
|
438 | |||
440 | extra_static_paths = List(Unicode, config=True, |
|
439 | extra_static_paths = List(Unicode, config=True, | |
441 | help="""Extra paths to search for serving static files. |
|
440 | help="""Extra paths to search for serving static files. | |
442 |
|
441 | |||
443 | This allows adding javascript/css to be available from the notebook server machine, |
|
442 | This allows adding javascript/css to be available from the notebook server machine, | |
444 | or overriding individual files in IPython's own static files.""" |
|
443 | or overriding individual files in IPython's own static files.""" | |
445 | ) |
|
444 | ) | |
446 | def _extra_static_paths_default(self): |
|
445 | def _extra_static_paths_default(self): | |
447 | return [os.path.join(self.profile_dir.location, 'static')] |
|
446 | return [os.path.join(self.profile_dir.location, 'static')] | |
448 |
|
447 | |||
449 | @property |
|
448 | @property | |
450 | def static_file_path(self): |
|
449 | def static_file_path(self): | |
451 | """return extra paths + the default location""" |
|
450 | """return extra paths + the default location""" | |
452 | return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] |
|
451 | return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH] | |
453 |
|
452 | |||
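So static files are looked up in extra_static_paths first (by default the profile's static directory) and only then in IPython's bundled static files. A config sketch with an illustrative path:

    # ipython_notebook_config.py -- path is an example
    c = get_config()
    c.NotebookApp.extra_static_paths = ['/srv/my-notebook-overrides/static']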
454 | mathjax_url = Unicode("", config=True, |
|
453 | mathjax_url = Unicode("", config=True, | |
455 | help="""The url for MathJax.js.""" |
|
454 | help="""The url for MathJax.js.""" | |
456 | ) |
|
455 | ) | |
457 | def _mathjax_url_default(self): |
|
456 | def _mathjax_url_default(self): | |
458 | if not self.enable_mathjax: |
|
457 | if not self.enable_mathjax: | |
459 | return u'' |
|
458 | return u'' | |
460 | static_url_prefix = self.webapp_settings.get("static_url_prefix", |
|
459 | static_url_prefix = self.webapp_settings.get("static_url_prefix", | |
461 | url_path_join(self.base_project_url, "static") |
|
460 | url_path_join(self.base_project_url, "static") | |
462 | ) |
|
461 | ) | |
463 | try: |
|
462 | try: | |
464 | mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), self.static_file_path) |
|
463 | mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), self.static_file_path) | |
465 | except IOError: |
|
464 | except IOError: | |
466 | if self.certfile: |
|
465 | if self.certfile: | |
467 | # HTTPS: load from Rackspace CDN, because SSL certificate requires it |
|
466 | # HTTPS: load from Rackspace CDN, because SSL certificate requires it | |
468 | base = u"https://c328740.ssl.cf1.rackcdn.com" |
|
467 | base = u"https://c328740.ssl.cf1.rackcdn.com" | |
469 | else: |
|
468 | else: | |
470 | base = u"http://cdn.mathjax.org" |
|
469 | base = u"http://cdn.mathjax.org" | |
471 |
|
470 | |||
472 | url = base + u"/mathjax/latest/MathJax.js" |
|
471 | url = base + u"/mathjax/latest/MathJax.js" | |
473 | self.log.info("Using MathJax from CDN: %s", url) |
|
472 | self.log.info("Using MathJax from CDN: %s", url) | |
474 | return url |
|
473 | return url | |
475 | else: |
|
474 | else: | |
476 | self.log.info("Using local MathJax from %s" % mathjax) |
|
475 | self.log.info("Using local MathJax from %s" % mathjax) | |
477 | return url_path_join(static_url_prefix, u"mathjax/MathJax.js") |
|
476 | return url_path_join(static_url_prefix, u"mathjax/MathJax.js") | |
478 |
|
477 | |||
479 | def _mathjax_url_changed(self, name, old, new): |
|
478 | def _mathjax_url_changed(self, name, old, new): | |
480 | if new and not self.enable_mathjax: |
|
479 | if new and not self.enable_mathjax: | |
481 | # enable_mathjax=False overrides mathjax_url |
|
480 | # enable_mathjax=False overrides mathjax_url | |
482 | self.mathjax_url = u'' |
|
481 | self.mathjax_url = u'' | |
483 | else: |
|
482 | else: | |
484 | self.log.info("Using MathJax: %s", new) |
|
483 | self.log.info("Using MathJax: %s", new) | |
485 |
|
484 | |||
486 | notebook_manager_class = DottedObjectName('IPython.html.services.notebooks.filenbmanager.FileNotebookManager', |
|
485 | notebook_manager_class = DottedObjectName('IPython.html.services.notebooks.filenbmanager.FileNotebookManager', | |
487 | config=True, |
|
486 | config=True, | |
488 | help='The notebook manager class to use.') |
|
487 | help='The notebook manager class to use.') | |
489 |
|
488 | |||
490 | trust_xheaders = Bool(False, config=True, |
|
489 | trust_xheaders = Bool(False, config=True, | |
491 | help=("Whether or not to trust X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers " |
|
490 | help=("Whether or not to trust X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers " | |
492 | "sent by the upstream reverse proxy. Neccesary if the proxy handles SSL") |
|
491 | "sent by the upstream reverse proxy. Neccesary if the proxy handles SSL") | |
493 | ) |
|
492 | ) | |
494 |
|
493 | |||
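A typical use of trust_xheaders is a notebook server running behind an SSL-terminating reverse proxy; in that setup the option is simply switched on. A sketch:

    # ipython_notebook_config.py -- only when a trusted reverse proxy handles SSL
    c = get_config()
    c.NotebookApp.trust_xheaders = True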
495 | def parse_command_line(self, argv=None): |
|
494 | def parse_command_line(self, argv=None): | |
496 | super(NotebookApp, self).parse_command_line(argv) |
|
495 | super(NotebookApp, self).parse_command_line(argv) | |
497 | if argv is None: |
|
496 | if argv is None: | |
498 | argv = sys.argv[1:] |
|
497 | argv = sys.argv[1:] | |
499 |
|
498 | |||
500 | # Scrub frontend-specific flags |
|
499 | # Scrub frontend-specific flags | |
501 | self.kernel_argv = swallow_argv(argv, notebook_aliases, notebook_flags) |
|
500 | self.kernel_argv = swallow_argv(argv, notebook_aliases, notebook_flags) | |
502 | # Kernel should inherit default config file from frontend |
|
501 | # Kernel should inherit default config file from frontend | |
503 | self.kernel_argv.append("--IPKernelApp.parent_appname='%s'" % self.name) |
|
502 | self.kernel_argv.append("--IPKernelApp.parent_appname='%s'" % self.name) | |
504 |
|
503 | |||
505 | if self.extra_args: |
|
504 | if self.extra_args: | |
506 | f = os.path.abspath(self.extra_args[0]) |
|
505 | f = os.path.abspath(self.extra_args[0]) | |
507 | if os.path.isdir(f): |
|
506 | if os.path.isdir(f): | |
508 | nbdir = f |
|
507 | nbdir = f | |
509 | else: |
|
508 | else: | |
510 | self.file_to_run = f |
|
509 | self.file_to_run = f | |
511 | nbdir = os.path.dirname(f) |
|
510 | nbdir = os.path.dirname(f) | |
512 | self.config.NotebookManager.notebook_dir = nbdir |
|
511 | self.config.NotebookManager.notebook_dir = nbdir | |
513 |
|
512 | |||
514 | def init_configurables(self): |
|
513 | def init_configurables(self): | |
515 | # force Session default to be secure |
|
514 | # force Session default to be secure | |
516 | default_secure(self.config) |
|
515 | default_secure(self.config) | |
517 | self.kernel_manager = MappingKernelManager( |
|
516 | self.kernel_manager = MappingKernelManager( | |
518 | parent=self, log=self.log, kernel_argv=self.kernel_argv, |
|
517 | parent=self, log=self.log, kernel_argv=self.kernel_argv, | |
519 | connection_dir = self.profile_dir.security_dir, |
|
518 | connection_dir = self.profile_dir.security_dir, | |
520 | ) |
|
519 | ) | |
521 | kls = import_item(self.notebook_manager_class) |
|
520 | kls = import_item(self.notebook_manager_class) | |
522 | self.notebook_manager = kls(parent=self, log=self.log) |
|
521 | self.notebook_manager = kls(parent=self, log=self.log) | |
523 | self.notebook_manager.load_notebook_names() |
|
522 | self.notebook_manager.load_notebook_names() | |
524 | self.cluster_manager = ClusterManager(parent=self, log=self.log) |
|
523 | self.cluster_manager = ClusterManager(parent=self, log=self.log) | |
525 | self.cluster_manager.update_profiles() |
|
524 | self.cluster_manager.update_profiles() | |
526 |
|
525 | |||
527 | def init_logging(self): |
|
526 | def init_logging(self): | |
528 | # This prevents double log messages because tornado uses a root logger that |
|
527 | # This prevents double log messages because tornado uses a root logger that | |
529 | # self.log is a child of. The logging module dispatches log messages to a logger |
|
528 | # self.log is a child of. The logging module dispatches log messages to a logger | |
530 | # and all of its ancestors until propagate is set to False. |
|
529 | # and all of its ancestors until propagate is set to False. | |
531 | self.log.propagate = False |
|
530 | self.log.propagate = False | |
532 |
|
531 | |||
533 | # hook up tornado 3's loggers to our app handlers |
|
532 | # hook up tornado 3's loggers to our app handlers | |
534 | for name in ('access', 'application', 'general'): |
|
533 | for name in ('access', 'application', 'general'): | |
535 | logging.getLogger('tornado.%s' % name).handlers = self.log.handlers |
|
534 | logging.getLogger('tornado.%s' % name).handlers = self.log.handlers | |
536 |
|
535 | |||
537 | def init_webapp(self): |
|
536 | def init_webapp(self): | |
538 | """initialize tornado webapp and httpserver""" |
|
537 | """initialize tornado webapp and httpserver""" | |
539 | self.web_app = NotebookWebApplication( |
|
538 | self.web_app = NotebookWebApplication( | |
540 | self, self.kernel_manager, self.notebook_manager, |
|
539 | self, self.kernel_manager, self.notebook_manager, | |
541 | self.cluster_manager, self.log, |
|
540 | self.cluster_manager, self.log, | |
542 | self.base_project_url, self.webapp_settings |
|
541 | self.base_project_url, self.webapp_settings | |
543 | ) |
|
542 | ) | |
544 | if self.certfile: |
|
543 | if self.certfile: | |
545 | ssl_options = dict(certfile=self.certfile) |
|
544 | ssl_options = dict(certfile=self.certfile) | |
546 | if self.keyfile: |
|
545 | if self.keyfile: | |
547 | ssl_options['keyfile'] = self.keyfile |
|
546 | ssl_options['keyfile'] = self.keyfile | |
548 | else: |
|
547 | else: | |
549 | ssl_options = None |
|
548 | ssl_options = None | |
550 | self.web_app.password = self.password |
|
549 | self.web_app.password = self.password | |
551 | self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options, |
|
550 | self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options, | |
552 | xheaders=self.trust_xheaders) |
|
551 | xheaders=self.trust_xheaders) | |
553 | if not self.ip: |
|
552 | if not self.ip: | |
554 | warning = "WARNING: The notebook server is listening on all IP addresses" |
|
553 | warning = "WARNING: The notebook server is listening on all IP addresses" | |
555 | if ssl_options is None: |
|
554 | if ssl_options is None: | |
556 | self.log.critical(warning + " and not using encryption. This " |
|
555 | self.log.critical(warning + " and not using encryption. This " | |
557 | "is not recommended.") |
|
556 | "is not recommended.") | |
558 | if not self.password and not self.read_only: |
|
557 | if not self.password and not self.read_only: | |
559 | self.log.critical(warning + " and not using authentication. " |
|
558 | self.log.critical(warning + " and not using authentication. " | |
560 | "This is highly insecure and not recommended.") |
|
559 | "This is highly insecure and not recommended.") | |
561 | success = None |
|
560 | success = None | |
562 | for port in random_ports(self.port, self.port_retries+1): |
|
561 | for port in random_ports(self.port, self.port_retries+1): | |
563 | try: |
|
562 | try: | |
564 | self.http_server.listen(port, self.ip) |
|
563 | self.http_server.listen(port, self.ip) | |
565 | except socket.error as e: |
|
564 | except socket.error as e: | |
566 | # XXX: remove the e.errno == -9 block when we require |
|
565 | # XXX: remove the e.errno == -9 block when we require | |
567 | # tornado >= 3.0 |
|
566 | # tornado >= 3.0 | |
568 | if e.errno == -9 and tornado.version_info[0] < 3: |
|
567 | if e.errno == -9 and tornado.version_info[0] < 3: | |
569 | # The flags passed to socket.getaddrinfo from |
|
568 | # The flags passed to socket.getaddrinfo from | |
570 | # tornado.netutils.bind_sockets can cause "gaierror: |
|
569 | # tornado.netutils.bind_sockets can cause "gaierror: | |
571 | # [Errno -9] Address family for hostname not supported" |
|
570 | # [Errno -9] Address family for hostname not supported" | |
572 | # when the interface is not associated, for example. |
|
571 | # when the interface is not associated, for example. | |
573 | # Changing the flags to exclude socket.AI_ADDRCONFIG does |
|
572 | # Changing the flags to exclude socket.AI_ADDRCONFIG does | |
574 | # not cause this error, but the only way to do this is to |
|
573 | # not cause this error, but the only way to do this is to | |
575 | # monkeypatch socket to remove the AI_ADDRCONFIG attribute |
|
574 | # monkeypatch socket to remove the AI_ADDRCONFIG attribute | |
576 | saved_AI_ADDRCONFIG = socket.AI_ADDRCONFIG |
|
575 | saved_AI_ADDRCONFIG = socket.AI_ADDRCONFIG | |
577 | self.log.warn('Monkeypatching socket to fix tornado bug') |
|
576 | self.log.warn('Monkeypatching socket to fix tornado bug') | |
578 | del(socket.AI_ADDRCONFIG) |
|
577 | del(socket.AI_ADDRCONFIG) | |
579 | try: |
|
578 | try: | |
580 | # retry the tornado call without AI_ADDRCONFIG flags |
|
579 | # retry the tornado call without AI_ADDRCONFIG flags | |
581 | self.http_server.listen(port, self.ip) |
|
580 | self.http_server.listen(port, self.ip) | |
582 | except socket.error as e2: |
|
581 | except socket.error as e2: | |
583 | e = e2 |
|
582 | e = e2 | |
584 | else: |
|
583 | else: | |
585 | self.port = port |
|
584 | self.port = port | |
586 | success = True |
|
585 | success = True | |
587 | break |
|
586 | break | |
588 | # restore the monkeypatch |
|
587 | # restore the monkeypatch | |
589 | socket.AI_ADDRCONFIG = saved_AI_ADDRCONFIG |
|
588 | socket.AI_ADDRCONFIG = saved_AI_ADDRCONFIG | |
590 | if e.errno != errno.EADDRINUSE: |
|
589 | if e.errno != errno.EADDRINUSE: | |
591 | raise |
|
590 | raise | |
592 | self.log.info('The port %i is already in use, trying another random port.' % port) |
|
591 | self.log.info('The port %i is already in use, trying another random port.' % port) | |
593 | else: |
|
592 | else: | |
594 | self.port = port |
|
593 | self.port = port | |
595 | success = True |
|
594 | success = True | |
596 | break |
|
595 | break | |
597 | if not success: |
|
596 | if not success: | |
598 | self.log.critical('ERROR: the notebook server could not be started because ' |
|
597 | self.log.critical('ERROR: the notebook server could not be started because ' | |
599 | 'no available port could be found.') |
|
598 | 'no available port could be found.') | |
600 | self.exit(1) |
|
599 | self.exit(1) | |
601 |
|
600 | |||
602 | def init_signal(self): |
|
601 | def init_signal(self): | |
603 | if not sys.platform.startswith('win'): |
|
602 | if not sys.platform.startswith('win'): | |
604 | signal.signal(signal.SIGINT, self._handle_sigint) |
|
603 | signal.signal(signal.SIGINT, self._handle_sigint) | |
605 | signal.signal(signal.SIGTERM, self._signal_stop) |
|
604 | signal.signal(signal.SIGTERM, self._signal_stop) | |
606 | if hasattr(signal, 'SIGUSR1'): |
|
605 | if hasattr(signal, 'SIGUSR1'): | |
607 | # Windows doesn't support SIGUSR1 |
|
606 | # Windows doesn't support SIGUSR1 | |
608 | signal.signal(signal.SIGUSR1, self._signal_info) |
|
607 | signal.signal(signal.SIGUSR1, self._signal_info) | |
609 | if hasattr(signal, 'SIGINFO'): |
|
608 | if hasattr(signal, 'SIGINFO'): | |
610 | # only on BSD-based systems |
|
609 | # only on BSD-based systems | |
611 | signal.signal(signal.SIGINFO, self._signal_info) |
|
610 | signal.signal(signal.SIGINFO, self._signal_info) | |
612 |
|
611 | |||
613 | def _handle_sigint(self, sig, frame): |
|
612 | def _handle_sigint(self, sig, frame): | |
614 | """SIGINT handler spawns confirmation dialog""" |
|
613 | """SIGINT handler spawns confirmation dialog""" | |
615 | # register more forceful signal handler for ^C^C case |
|
614 | # register more forceful signal handler for ^C^C case | |
616 | signal.signal(signal.SIGINT, self._signal_stop) |
|
615 | signal.signal(signal.SIGINT, self._signal_stop) | |
617 | # request confirmation dialog in bg thread, to avoid |
|
616 | # request confirmation dialog in bg thread, to avoid | |
618 | # blocking the App |
|
617 | # blocking the App | |
619 | thread = threading.Thread(target=self._confirm_exit) |
|
618 | thread = threading.Thread(target=self._confirm_exit) | |
620 | thread.daemon = True |
|
619 | thread.daemon = True | |
621 | thread.start() |
|
620 | thread.start() | |
622 |
|
621 | |||
623 | def _restore_sigint_handler(self): |
|
622 | def _restore_sigint_handler(self): | |
624 | """callback for restoring original SIGINT handler""" |
|
623 | """callback for restoring original SIGINT handler""" | |
625 | signal.signal(signal.SIGINT, self._handle_sigint) |
|
624 | signal.signal(signal.SIGINT, self._handle_sigint) | |
626 |
|
625 | |||
627 | def _confirm_exit(self): |
|
626 | def _confirm_exit(self): | |
628 | """confirm shutdown on ^C |
|
627 | """confirm shutdown on ^C | |
629 |
|
628 | |||
630 | A second ^C, or answering 'y' within 5s will cause shutdown, |
|
629 | A second ^C, or answering 'y' within 5s will cause shutdown, | |
631 | otherwise the original SIGINT handler will be restored. |
|
630 | otherwise the original SIGINT handler will be restored. | |
632 |
|
631 | |||
633 | This doesn't work on Windows. |
|
632 | This doesn't work on Windows. | |
634 | """ |
|
633 | """ | |
635 | # FIXME: remove this delay when pyzmq dependency is >= 2.1.11 |
|
634 | # FIXME: remove this delay when pyzmq dependency is >= 2.1.11 | |
636 | time.sleep(0.1) |
|
635 | time.sleep(0.1) | |
637 | info = self.log.info |
|
636 | info = self.log.info | |
638 | info('interrupted') |
|
637 | info('interrupted') | |
639 | print self.notebook_info() |
|
638 | print self.notebook_info() | |
640 | sys.stdout.write("Shutdown this notebook server (y/[n])? ") |
|
639 | sys.stdout.write("Shutdown this notebook server (y/[n])? ") | |
641 | sys.stdout.flush() |
|
640 | sys.stdout.flush() | |
642 | r,w,x = select.select([sys.stdin], [], [], 5) |
|
641 | r,w,x = select.select([sys.stdin], [], [], 5) | |
643 | if r: |
|
642 | if r: | |
644 | line = sys.stdin.readline() |
|
643 | line = sys.stdin.readline() | |
645 | if line.lower().startswith('y'): |
|
644 | if line.lower().startswith('y'): | |
646 | self.log.critical("Shutdown confirmed") |
|
645 | self.log.critical("Shutdown confirmed") | |
647 | ioloop.IOLoop.instance().stop() |
|
646 | ioloop.IOLoop.instance().stop() | |
648 | return |
|
647 | return | |
649 | else: |
|
648 | else: | |
650 | print "No answer for 5s:", |
|
649 | print "No answer for 5s:", | |
651 | print "resuming operation..." |
|
650 | print "resuming operation..." | |
652 | # no answer, or answer is no: |
|
651 | # no answer, or answer is no: | |
653 | # set it back to original SIGINT handler |
|
652 | # set it back to original SIGINT handler | |
654 | # use IOLoop.add_callback because signal.signal must be called |
|
653 | # use IOLoop.add_callback because signal.signal must be called | |
655 | # from main thread |
|
654 | # from main thread | |
656 | ioloop.IOLoop.instance().add_callback(self._restore_sigint_handler) |
|
655 | ioloop.IOLoop.instance().add_callback(self._restore_sigint_handler) | |
657 |
|
656 | |||
658 | def _signal_stop(self, sig, frame): |
|
657 | def _signal_stop(self, sig, frame): | |
659 | self.log.critical("received signal %s, stopping", sig) |
|
658 | self.log.critical("received signal %s, stopping", sig) | |
660 | ioloop.IOLoop.instance().stop() |
|
659 | ioloop.IOLoop.instance().stop() | |
661 |
|
660 | |||
662 | def _signal_info(self, sig, frame): |
|
661 | def _signal_info(self, sig, frame): | |
663 | print self.notebook_info() |
|
662 | print self.notebook_info() | |
664 |
|
663 | |||
665 | def init_components(self): |
|
664 | def init_components(self): | |
666 | """Check the components submodule, and warn if it's unclean""" |
|
665 | """Check the components submodule, and warn if it's unclean""" | |
667 | status = submodule.check_submodule_status() |
|
666 | status = submodule.check_submodule_status() | |
668 | if status == 'missing': |
|
667 | if status == 'missing': | |
669 | self.log.warn("components submodule missing, running `git submodule update`") |
|
668 | self.log.warn("components submodule missing, running `git submodule update`") | |
670 | submodule.update_submodules(submodule.ipython_parent()) |
|
669 | submodule.update_submodules(submodule.ipython_parent()) | |
671 | elif status == 'unclean': |
|
670 | elif status == 'unclean': | |
672 | self.log.warn("components submodule unclean, you may see 404s on static/components") |
|
671 | self.log.warn("components submodule unclean, you may see 404s on static/components") | |
673 | self.log.warn("run `setup.py submodule` or `git submodule update` to update") |
|
672 | self.log.warn("run `setup.py submodule` or `git submodule update` to update") | |
674 |
|
673 | |||
675 |
|
674 | |||
676 | @catch_config_error |
|
675 | @catch_config_error | |
677 | def initialize(self, argv=None): |
|
676 | def initialize(self, argv=None): | |
678 | self.init_logging() |
|
677 | self.init_logging() | |
679 | super(NotebookApp, self).initialize(argv) |
|
678 | super(NotebookApp, self).initialize(argv) | |
680 | self.init_configurables() |
|
679 | self.init_configurables() | |
681 | self.init_components() |
|
680 | self.init_components() | |
682 | self.init_webapp() |
|
681 | self.init_webapp() | |
683 | self.init_signal() |
|
682 | self.init_signal() | |
684 |
|
683 | |||
685 | def cleanup_kernels(self): |
|
684 | def cleanup_kernels(self): | |
686 | """Shutdown all kernels. |
|
685 | """Shutdown all kernels. | |
687 |
|
686 | |||
688 | The kernels will shut down by themselves when this process no longer exists, |
|
687 | The kernels will shut down by themselves when this process no longer exists, | |
689 | but explicit shutdown allows the KernelManagers to clean up the connection files. |
|
688 | but explicit shutdown allows the KernelManagers to clean up the connection files. | |
690 | """ |
|
689 | """ | |
691 | self.log.info('Shutting down kernels') |
|
690 | self.log.info('Shutting down kernels') | |
692 | self.kernel_manager.shutdown_all() |
|
691 | self.kernel_manager.shutdown_all() | |
693 |
|
692 | |||
694 | def notebook_info(self): |
|
693 | def notebook_info(self): | |
695 | "Return the current working directory and the server url information" |
|
694 | "Return the current working directory and the server url information" | |
696 | mgr_info = self.notebook_manager.info_string() + "\n" |
|
695 | mgr_info = self.notebook_manager.info_string() + "\n" | |
697 | return mgr_info + "The IPython Notebook is running at: %s" % self._url |
|
696 | return mgr_info + "The IPython Notebook is running at: %s" % self._url | |
698 |
|
697 | |||
699 | def start(self): |
|
698 | def start(self): | |
700 | """ Start the IPython Notebook server app, after initialization |
|
699 | """ Start the IPython Notebook server app, after initialization | |
701 |
|
700 | |||
702 | This method takes no arguments so all configuration and initialization |
|
701 | This method takes no arguments so all configuration and initialization | |
703 | must be done prior to calling this method.""" |
|
702 | must be done prior to calling this method.""" | |
704 | ip = self.ip if self.ip else '[all ip addresses on your system]' |
|
703 | ip = self.ip if self.ip else '[all ip addresses on your system]' | |
705 | proto = 'https' if self.certfile else 'http' |
|
704 | proto = 'https' if self.certfile else 'http' | |
706 | info = self.log.info |
|
705 | info = self.log.info | |
707 | self._url = "%s://%s:%i%s" % (proto, ip, self.port, |
|
706 | self._url = "%s://%s:%i%s" % (proto, ip, self.port, | |
708 | self.base_project_url) |
|
707 | self.base_project_url) | |
709 | for line in self.notebook_info().split("\n"): |
|
708 | for line in self.notebook_info().split("\n"): | |
710 | info(line) |
|
709 | info(line) | |
711 | info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).") |
|
710 | info("Use Control-C to stop this server and shut down all kernels (twice to skip confirmation).") | |
712 |
|
711 | |||
713 | if self.open_browser or self.file_to_run: |
|
712 | if self.open_browser or self.file_to_run: | |
714 | ip = self.ip or LOCALHOST |
|
713 | ip = self.ip or LOCALHOST | |
715 | try: |
|
714 | try: | |
716 | browser = webbrowser.get(self.browser or None) |
|
715 | browser = webbrowser.get(self.browser or None) | |
717 | except webbrowser.Error as e: |
|
716 | except webbrowser.Error as e: | |
718 | self.log.warn('No web browser found: %s.' % e) |
|
717 | self.log.warn('No web browser found: %s.' % e) | |
719 | browser = None |
|
718 | browser = None | |
720 |
|
719 | |||
721 | if self.file_to_run: |
|
720 | if self.file_to_run: | |
722 | name, _ = os.path.splitext(os.path.basename(self.file_to_run)) |
|
721 | name, _ = os.path.splitext(os.path.basename(self.file_to_run)) | |
723 | url = self.notebook_manager.rev_mapping.get(name, '') |
|
722 | url = self.notebook_manager.rev_mapping.get(name, '') | |
724 | else: |
|
723 | else: | |
725 | url = '' |
|
724 | url = '' | |
726 | if browser: |
|
725 | if browser: | |
727 | b = lambda : browser.open("%s://%s:%i%s%s" % (proto, ip, |
|
726 | b = lambda : browser.open("%s://%s:%i%s%s" % (proto, ip, | |
728 | self.port, self.base_project_url, url), new=2) |
|
727 | self.port, self.base_project_url, url), new=2) | |
729 | threading.Thread(target=b).start() |
|
728 | threading.Thread(target=b).start() | |
730 | try: |
|
729 | try: | |
731 | ioloop.IOLoop.instance().start() |
|
730 | ioloop.IOLoop.instance().start() | |
732 | except KeyboardInterrupt: |
|
731 | except KeyboardInterrupt: | |
733 | info("Interrupted...") |
|
732 | info("Interrupted...") | |
734 | finally: |
|
733 | finally: | |
735 | self.cleanup_kernels() |
|
734 | self.cleanup_kernels() | |
736 |
|
735 | |||
737 |
|
736 | |||
738 | #----------------------------------------------------------------------------- |
|
737 | #----------------------------------------------------------------------------- | |
739 | # Main entry point |
|
738 | # Main entry point | |
740 | #----------------------------------------------------------------------------- |
|
739 | #----------------------------------------------------------------------------- | |
741 |
|
740 | |||
742 | launch_new_instance = NotebookApp.launch_instance |
|
741 | launch_new_instance = NotebookApp.launch_instance | |
743 |
|
742 |
@@ -1,455 +1,459 | |||||
1 | """An Application for launching a kernel |
|
1 | """An Application for launching a kernel | |
2 |
|
2 | |||
3 | Authors |
|
3 | Authors | |
4 | ------- |
|
4 | ------- | |
5 | * MinRK |
|
5 | * MinRK | |
6 | """ |
|
6 | """ | |
7 | #----------------------------------------------------------------------------- |
|
7 | #----------------------------------------------------------------------------- | |
8 | # Copyright (C) 2011 The IPython Development Team |
|
8 | # Copyright (C) 2011 The IPython Development Team | |
9 | # |
|
9 | # | |
10 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | # Distributed under the terms of the BSD License. The full license is in | |
11 | # the file COPYING.txt, distributed as part of this software. |
|
11 | # the file COPYING.txt, distributed as part of this software. | |
12 | #----------------------------------------------------------------------------- |
|
12 | #----------------------------------------------------------------------------- | |
13 |
|
13 | |||
14 | #----------------------------------------------------------------------------- |
|
14 | #----------------------------------------------------------------------------- | |
15 | # Imports |
|
15 | # Imports | |
16 | #----------------------------------------------------------------------------- |
|
16 | #----------------------------------------------------------------------------- | |
17 |
|
17 | |||
18 | from __future__ import print_function |
|
18 | from __future__ import print_function | |
19 |
|
19 | |||
20 | # Standard library imports |
|
20 | # Standard library imports | |
21 | import atexit |
|
21 | import atexit | |
22 | import json |
|
22 | import json | |
23 | import os |
|
23 | import os | |
24 | import sys |
|
24 | import sys | |
25 | import signal |
|
25 | import signal | |
26 |
|
26 | |||
27 | # System library imports |
|
27 | # System library imports | |
28 | import zmq |
|
28 | import zmq | |
29 | from zmq.eventloop import ioloop |
|
29 | from zmq.eventloop import ioloop | |
30 | from zmq.eventloop.zmqstream import ZMQStream |
|
30 | from zmq.eventloop.zmqstream import ZMQStream | |
31 |
|
31 | |||
32 | # IPython imports |
|
32 | # IPython imports | |
33 | from IPython.core.ultratb import FormattedTB |
|
33 | from IPython.core.ultratb import FormattedTB | |
34 | from IPython.core.application import ( |
|
34 | from IPython.core.application import ( | |
35 | BaseIPythonApplication, base_flags, base_aliases, catch_config_error |
|
35 | BaseIPythonApplication, base_flags, base_aliases, catch_config_error | |
36 | ) |
|
36 | ) | |
37 | from IPython.core.profiledir import ProfileDir |
|
37 | from IPython.core.profiledir import ProfileDir | |
38 | from IPython.core.shellapp import ( |
|
38 | from IPython.core.shellapp import ( | |
39 | InteractiveShellApp, shell_flags, shell_aliases |
|
39 | InteractiveShellApp, shell_flags, shell_aliases | |
40 | ) |
|
40 | ) | |
41 | from IPython.utils import io |
|
41 | from IPython.utils import io | |
42 | from IPython.utils.localinterfaces import LOCALHOST |
|
42 | from IPython.utils.localinterfaces import LOCALHOST | |
43 | from IPython.utils.path import filefind |
|
43 | from IPython.utils.path import filefind | |
44 | from IPython.utils.py3compat import str_to_bytes |
|
44 | from IPython.utils.py3compat import str_to_bytes | |
45 | from IPython.utils.traitlets import ( |
|
45 | from IPython.utils.traitlets import ( | |
46 | Any, Instance, Dict, Unicode, Integer, Bool, CaselessStrEnum, |
|
46 | Any, Instance, Dict, Unicode, Integer, Bool, CaselessStrEnum, | |
47 | DottedObjectName, |
|
47 | DottedObjectName, | |
48 | ) |
|
48 | ) | |
49 | from IPython.utils.importstring import import_item |
|
49 | from IPython.utils.importstring import import_item | |
50 | from IPython.kernel import write_connection_file |
|
50 | from IPython.kernel import write_connection_file | |
51 |
|
51 | |||
52 | # local imports |
|
52 | # local imports | |
53 | from heartbeat import Heartbeat |
|
53 | from heartbeat import Heartbeat | |
54 | from ipkernel import Kernel |
|
54 | from ipkernel import Kernel | |
55 | from parentpoller import ParentPollerUnix, ParentPollerWindows |
|
55 | from parentpoller import ParentPollerUnix, ParentPollerWindows | |
56 | from session import ( |
|
56 | from session import ( | |
57 | Session, session_flags, session_aliases, default_secure, |
|
57 | Session, session_flags, session_aliases, default_secure, | |
58 | ) |
|
58 | ) | |
59 | from zmqshell import ZMQInteractiveShell |
|
59 | from zmqshell import ZMQInteractiveShell | |
60 |
|
60 | |||
61 | #----------------------------------------------------------------------------- |
|
61 | #----------------------------------------------------------------------------- | |
62 | # Flags and Aliases |
|
62 | # Flags and Aliases | |
63 | #----------------------------------------------------------------------------- |
|
63 | #----------------------------------------------------------------------------- | |
64 |
|
64 | |||
65 | kernel_aliases = dict(base_aliases) |
|
65 | kernel_aliases = dict(base_aliases) | |
66 | kernel_aliases.update({ |
|
66 | kernel_aliases.update({ | |
67 | 'ip' : 'IPKernelApp.ip', |
|
67 | 'ip' : 'IPKernelApp.ip', | |
68 | 'hb' : 'IPKernelApp.hb_port', |
|
68 | 'hb' : 'IPKernelApp.hb_port', | |
69 | 'shell' : 'IPKernelApp.shell_port', |
|
69 | 'shell' : 'IPKernelApp.shell_port', | |
70 | 'iopub' : 'IPKernelApp.iopub_port', |
|
70 | 'iopub' : 'IPKernelApp.iopub_port', | |
71 | 'stdin' : 'IPKernelApp.stdin_port', |
|
71 | 'stdin' : 'IPKernelApp.stdin_port', | |
72 | 'control' : 'IPKernelApp.control_port', |
|
72 | 'control' : 'IPKernelApp.control_port', | |
73 | 'f' : 'IPKernelApp.connection_file', |
|
73 | 'f' : 'IPKernelApp.connection_file', | |
74 | 'parent': 'IPKernelApp.parent_handle', |
|
74 | 'parent': 'IPKernelApp.parent_handle', | |
75 | 'transport': 'IPKernelApp.transport', |
|
75 | 'transport': 'IPKernelApp.transport', | |
76 | }) |
|
76 | }) | |
77 | if sys.platform.startswith('win'): |
|
77 | if sys.platform.startswith('win'): | |
78 | kernel_aliases['interrupt'] = 'IPKernelApp.interrupt' |
|
78 | kernel_aliases['interrupt'] = 'IPKernelApp.interrupt' | |
79 |
|
79 | |||
80 | kernel_flags = dict(base_flags) |
|
80 | kernel_flags = dict(base_flags) | |
81 | kernel_flags.update({ |
|
81 | kernel_flags.update({ | |
82 | 'no-stdout' : ( |
|
82 | 'no-stdout' : ( | |
83 | {'IPKernelApp' : {'no_stdout' : True}}, |
|
83 | {'IPKernelApp' : {'no_stdout' : True}}, | |
84 | "redirect stdout to the null device"), |
|
84 | "redirect stdout to the null device"), | |
85 | 'no-stderr' : ( |
|
85 | 'no-stderr' : ( | |
86 | {'IPKernelApp' : {'no_stderr' : True}}, |
|
86 | {'IPKernelApp' : {'no_stderr' : True}}, | |
87 | "redirect stderr to the null device"), |
|
87 | "redirect stderr to the null device"), | |
88 | 'pylab' : ( |
|
88 | 'pylab' : ( | |
89 | {'IPKernelApp' : {'pylab' : 'auto'}}, |
|
89 | {'IPKernelApp' : {'pylab' : 'auto'}}, | |
90 | """Pre-load matplotlib and numpy for interactive use with |
|
90 | """Pre-load matplotlib and numpy for interactive use with | |
91 | the default matplotlib backend."""), |
|
91 | the default matplotlib backend."""), | |
92 | }) |
|
92 | }) | |
93 |
|
93 | |||
94 | # inherit flags&aliases for any IPython shell apps |
|
94 | # inherit flags&aliases for any IPython shell apps | |
95 | kernel_aliases.update(shell_aliases) |
|
95 | kernel_aliases.update(shell_aliases) | |
96 | kernel_flags.update(shell_flags) |
|
96 | kernel_flags.update(shell_flags) | |
97 |
|
97 | |||
98 | # inherit flags&aliases for Sessions |
|
98 | # inherit flags&aliases for Sessions | |
99 | kernel_aliases.update(session_aliases) |
|
99 | kernel_aliases.update(session_aliases) | |
100 | kernel_flags.update(session_flags) |
|
100 | kernel_flags.update(session_flags) | |
101 |
|
101 | |||
102 | #----------------------------------------------------------------------------- |
|
102 | #----------------------------------------------------------------------------- | |
103 | # Application class for starting an IPython Kernel |
|
103 | # Application class for starting an IPython Kernel | |
104 | #----------------------------------------------------------------------------- |
|
104 | #----------------------------------------------------------------------------- | |
105 |
|
105 | |||
106 | class IPKernelApp(BaseIPythonApplication, InteractiveShellApp): |
|
106 | class IPKernelApp(BaseIPythonApplication, InteractiveShellApp): | |
107 | name='ipkernel' |
|
107 | name='ipkernel' | |
108 | aliases = Dict(kernel_aliases) |
|
108 | aliases = Dict(kernel_aliases) | |
109 | flags = Dict(kernel_flags) |
|
109 | flags = Dict(kernel_flags) | |
110 | classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session] |
|
110 | classes = [Kernel, ZMQInteractiveShell, ProfileDir, Session] | |
111 | # the kernel class, as an importstring |
|
111 | # the kernel class, as an importstring | |
112 | kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel', config=True, |
|
112 | kernel_class = DottedObjectName('IPython.kernel.zmq.ipkernel.Kernel', config=True, | |
113 | help="""The Kernel subclass to be used. |
|
113 | help="""The Kernel subclass to be used. | |
114 |
|
114 | |||
115 | This should allow easy re-use of the IPKernelApp entry point |
|
115 | This should allow easy re-use of the IPKernelApp entry point | |
116 | to configure and launch kernels other than IPython's own. |
|
116 | to configure and launch kernels other than IPython's own. | |
117 | """) |
|
117 | """) | |
118 | kernel = Any() |
|
118 | kernel = Any() | |
119 | poller = Any() # don't restrict this even though current pollers are all Threads |
|
119 | poller = Any() # don't restrict this even though current pollers are all Threads | |
120 | heartbeat = Instance(Heartbeat) |
|
120 | heartbeat = Instance(Heartbeat) | |
121 | session = Instance('IPython.kernel.zmq.session.Session') |
|
121 | session = Instance('IPython.kernel.zmq.session.Session') | |
122 | ports = Dict() |
|
122 | ports = Dict() | |
123 |
|
123 | |||
|
124 | # ipkernel doesn't get its own config file | |||
|
125 | def _config_file_name_default(self): | |||
|
126 | return 'ipython_config.py' | |||
|
127 | ||||
124 | # inherit config file name from parent: |
|
128 | # inherit config file name from parent: | |
125 | parent_appname = Unicode(config=True) |
|
129 | parent_appname = Unicode(config=True) | |
126 | def _parent_appname_changed(self, name, old, new): |
|
130 | def _parent_appname_changed(self, name, old, new): | |
127 | if self.config_file_specified: |
|
131 | if self.config_file_specified: | |
128 | # it was manually specified, ignore |
|
132 | # it was manually specified, ignore | |
129 | return |
|
133 | return | |
130 | self.config_file_name = new.replace('-','_') + u'_config.py' |
|
134 | self.config_file_name = new.replace('-','_') + u'_config.py' | |
131 | # don't let this count as specifying the config file |
|
135 | # don't let this count as specifying the config file | |
132 | self.config_file_specified.remove(self.config_file_name) |
|
136 | self.config_file_specified.remove(self.config_file_name) | |
133 |
|
137 | |||
134 | # connection info: |
|
138 | # connection info: | |
135 | transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True) |
|
139 | transport = CaselessStrEnum(['tcp', 'ipc'], default_value='tcp', config=True) | |
136 | ip = Unicode(config=True, |
|
140 | ip = Unicode(config=True, | |
137 | help="Set the IP or interface on which the kernel will listen.") |
|
141 | help="Set the IP or interface on which the kernel will listen.") | |
138 | def _ip_default(self): |
|
142 | def _ip_default(self): | |
139 | if self.transport == 'ipc': |
|
143 | if self.transport == 'ipc': | |
140 | if self.connection_file: |
|
144 | if self.connection_file: | |
141 | return os.path.splitext(self.abs_connection_file)[0] + '-ipc' |
|
145 | return os.path.splitext(self.abs_connection_file)[0] + '-ipc' | |
142 | else: |
|
146 | else: | |
143 | return 'kernel-ipc' |
|
147 | return 'kernel-ipc' | |
144 | else: |
|
148 | else: | |
145 | return LOCALHOST |
|
149 | return LOCALHOST | |
146 | hb_port = Integer(0, config=True, help="set the heartbeat port [default: random]") |
|
150 | hb_port = Integer(0, config=True, help="set the heartbeat port [default: random]") | |
147 | shell_port = Integer(0, config=True, help="set the shell (ROUTER) port [default: random]") |
|
151 | shell_port = Integer(0, config=True, help="set the shell (ROUTER) port [default: random]") | |
148 | iopub_port = Integer(0, config=True, help="set the iopub (PUB) port [default: random]") |
|
152 | iopub_port = Integer(0, config=True, help="set the iopub (PUB) port [default: random]") | |
149 | stdin_port = Integer(0, config=True, help="set the stdin (ROUTER) port [default: random]") |
|
153 | stdin_port = Integer(0, config=True, help="set the stdin (ROUTER) port [default: random]") | |
150 | control_port = Integer(0, config=True, help="set the control (ROUTER) port [default: random]") |
|
154 | control_port = Integer(0, config=True, help="set the control (ROUTER) port [default: random]") | |
151 | connection_file = Unicode('', config=True, |
|
155 | connection_file = Unicode('', config=True, | |
152 | help="""JSON file in which to store connection info [default: kernel-<pid>.json] |
|
156 | help="""JSON file in which to store connection info [default: kernel-<pid>.json] | |
153 |
|
157 | |||
154 | This file will contain the IP, ports, and authentication key needed to connect |
|
158 | This file will contain the IP, ports, and authentication key needed to connect | |
155 | clients to this kernel. By default, this file will be created in the security dir |
|
159 | clients to this kernel. By default, this file will be created in the security dir | |
156 | of the current profile, but can be specified by absolute path. |
|
160 | of the current profile, but can be specified by absolute path. | |
157 | """) |
|
161 | """) | |
158 | @property |
|
162 | @property | |
159 | def abs_connection_file(self): |
|
163 | def abs_connection_file(self): | |
160 | if os.path.basename(self.connection_file) == self.connection_file: |
|
164 | if os.path.basename(self.connection_file) == self.connection_file: | |
161 | return os.path.join(self.profile_dir.security_dir, self.connection_file) |
|
165 | return os.path.join(self.profile_dir.security_dir, self.connection_file) | |
162 | else: |
|
166 | else: | |
163 | return self.connection_file |
|
167 | return self.connection_file | |
164 |
|
168 | |||
165 |
|
169 | |||
166 | # streams, etc. |
|
170 | # streams, etc. | |
167 | no_stdout = Bool(False, config=True, help="redirect stdout to the null device") |
|
171 | no_stdout = Bool(False, config=True, help="redirect stdout to the null device") | |
168 | no_stderr = Bool(False, config=True, help="redirect stderr to the null device") |
|
172 | no_stderr = Bool(False, config=True, help="redirect stderr to the null device") | |
169 | outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream', |
|
173 | outstream_class = DottedObjectName('IPython.kernel.zmq.iostream.OutStream', | |
170 | config=True, help="The importstring for the OutStream factory") |
|
174 | config=True, help="The importstring for the OutStream factory") | |
171 | displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook', |
|
175 | displayhook_class = DottedObjectName('IPython.kernel.zmq.displayhook.ZMQDisplayHook', | |
172 | config=True, help="The importstring for the DisplayHook factory") |
|
176 | config=True, help="The importstring for the DisplayHook factory") | |
173 |
|
177 | |||
174 | # polling |
|
178 | # polling | |
175 | parent_handle = Integer(0, config=True, |
|
179 | parent_handle = Integer(0, config=True, | |
176 | help="""Kill this process if its parent dies. On Windows, the argument |
|
180 | help="""Kill this process if its parent dies. On Windows, the argument | |
177 | specifies the HANDLE of the parent process; otherwise it is simply boolean. |
|
181 | specifies the HANDLE of the parent process; otherwise it is simply boolean. | |
178 | """) |
|
182 | """) | |
179 | interrupt = Integer(0, config=True, |
|
183 | interrupt = Integer(0, config=True, | |
180 | help="""ONLY USED ON WINDOWS |
|
184 | help="""ONLY USED ON WINDOWS | |
181 | Interrupt this process when the parent is signaled. |
|
185 | Interrupt this process when the parent is signaled. | |
182 | """) |
|
186 | """) | |
183 |
|
187 | |||
184 | def init_crash_handler(self): |
|
188 | def init_crash_handler(self): | |
185 | # Install minimal exception handling |
|
189 | # Install minimal exception handling | |
186 | sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor', |
|
190 | sys.excepthook = FormattedTB(mode='Verbose', color_scheme='NoColor', | |
187 | ostream=sys.__stdout__) |
|
191 | ostream=sys.__stdout__) | |
188 |
|
192 | |||
189 | def init_poller(self): |
|
193 | def init_poller(self): | |
190 | if sys.platform == 'win32': |
|
194 | if sys.platform == 'win32': | |
191 | if self.interrupt or self.parent_handle: |
|
195 | if self.interrupt or self.parent_handle: | |
192 | self.poller = ParentPollerWindows(self.interrupt, self.parent_handle) |
|
196 | self.poller = ParentPollerWindows(self.interrupt, self.parent_handle) | |
193 | elif self.parent_handle: |
|
197 | elif self.parent_handle: | |
194 | self.poller = ParentPollerUnix() |
|
198 | self.poller = ParentPollerUnix() | |
195 |
|
199 | |||
196 | def _bind_socket(self, s, port): |
|
200 | def _bind_socket(self, s, port): | |
197 | iface = '%s://%s' % (self.transport, self.ip) |
|
201 | iface = '%s://%s' % (self.transport, self.ip) | |
198 | if self.transport == 'tcp': |
|
202 | if self.transport == 'tcp': | |
199 | if port <= 0: |
|
203 | if port <= 0: | |
200 | port = s.bind_to_random_port(iface) |
|
204 | port = s.bind_to_random_port(iface) | |
201 | else: |
|
205 | else: | |
202 | s.bind("tcp://%s:%i" % (self.ip, port)) |
|
206 | s.bind("tcp://%s:%i" % (self.ip, port)) | |
203 | elif self.transport == 'ipc': |
|
207 | elif self.transport == 'ipc': | |
204 | if port <= 0: |
|
208 | if port <= 0: | |
205 | port = 1 |
|
209 | port = 1 | |
206 | path = "%s-%i" % (self.ip, port) |
|
210 | path = "%s-%i" % (self.ip, port) | |
207 | while os.path.exists(path): |
|
211 | while os.path.exists(path): | |
208 | port = port + 1 |
|
212 | port = port + 1 | |
209 | path = "%s-%i" % (self.ip, port) |
|
213 | path = "%s-%i" % (self.ip, port) | |
210 | else: |
|
214 | else: | |
211 | path = "%s-%i" % (self.ip, port) |
|
215 | path = "%s-%i" % (self.ip, port) | |
212 | s.bind("ipc://%s" % path) |
|
216 | s.bind("ipc://%s" % path) | |
213 | return port |
|
217 | return port | |
214 |
|
218 | |||
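For orientation, the endpoints that _bind_socket ends up binding look like the following (values illustrative; the ipc path is built from ip plus the chosen port number):

    # transport == 'tcp':  s.bind("tcp://127.0.0.1:54321")
    # transport == 'ipc':  s.bind("ipc://kernel-ipc-1")   # a filesystem socket, not a TCP port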
215 | def load_connection_file(self): |
|
219 | def load_connection_file(self): | |
216 | """load ip/port/hmac config from JSON connection file""" |
|
220 | """load ip/port/hmac config from JSON connection file""" | |
217 | try: |
|
221 | try: | |
218 | fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir]) |
|
222 | fname = filefind(self.connection_file, ['.', self.profile_dir.security_dir]) | |
219 | except IOError: |
|
223 | except IOError: | |
220 | self.log.debug("Connection file not found: %s", self.connection_file) |
|
224 | self.log.debug("Connection file not found: %s", self.connection_file) | |
221 | # This means I own it, so I will clean it up: |
|
225 | # This means I own it, so I will clean it up: | |
222 | atexit.register(self.cleanup_connection_file) |
|
226 | atexit.register(self.cleanup_connection_file) | |
223 | return |
|
227 | return | |
224 | self.log.debug(u"Loading connection file %s", fname) |
|
228 | self.log.debug(u"Loading connection file %s", fname) | |
225 | with open(fname) as f: |
|
229 | with open(fname) as f: | |
226 | s = f.read() |
|
230 | s = f.read() | |
227 | cfg = json.loads(s) |
|
231 | cfg = json.loads(s) | |
228 | self.transport = cfg.get('transport', self.transport) |
|
232 | self.transport = cfg.get('transport', self.transport) | |
229 | if self.ip == self._ip_default() and 'ip' in cfg: |
|
233 | if self.ip == self._ip_default() and 'ip' in cfg: | |
230 | # not overridden by config or cl_args |
|
234 | # not overridden by config or cl_args | |
231 | self.ip = cfg['ip'] |
|
235 | self.ip = cfg['ip'] | |
232 | for channel in ('hb', 'shell', 'iopub', 'stdin', 'control'): |
|
236 | for channel in ('hb', 'shell', 'iopub', 'stdin', 'control'): | |
233 | name = channel + '_port' |
|
237 | name = channel + '_port' | |
234 | if getattr(self, name) == 0 and name in cfg: |
|
238 | if getattr(self, name) == 0 and name in cfg: | |
235 | # not overridden by config or cl_args |
|
239 | # not overridden by config or cl_args | |
236 | setattr(self, name, cfg[name]) |
|
240 | setattr(self, name, cfg[name]) | |
237 | if 'key' in cfg: |
|
241 | if 'key' in cfg: | |
238 | self.config.Session.key = str_to_bytes(cfg['key']) |
|
242 | self.config.Session.key = str_to_bytes(cfg['key']) | |
239 |
|
243 | |||
240 | def write_connection_file(self): |
|
244 | def write_connection_file(self): | |
241 | """write connection info to JSON file""" |
|
245 | """write connection info to JSON file""" | |
242 | cf = self.abs_connection_file |
|
246 | cf = self.abs_connection_file | |
243 | self.log.debug("Writing connection file: %s", cf) |
|
247 | self.log.debug("Writing connection file: %s", cf) | |
244 | write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, |
|
248 | write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport, | |
245 | shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, |
|
249 | shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, | |
246 | iopub_port=self.iopub_port, control_port=self.control_port) |
|
250 | iopub_port=self.iopub_port, control_port=self.control_port) | |
247 |
|
251 | |||
248 | def cleanup_connection_file(self): |
|
252 | def cleanup_connection_file(self): | |
249 | cf = self.abs_connection_file |
|
253 | cf = self.abs_connection_file | |
250 | self.log.debug("Cleaning up connection file: %s", cf) |
|
254 | self.log.debug("Cleaning up connection file: %s", cf) | |
251 | try: |
|
255 | try: | |
252 | os.remove(cf) |
|
256 | os.remove(cf) | |
253 | except (IOError, OSError): |
|
257 | except (IOError, OSError): | |
254 | pass |
|
258 | pass | |
255 |
|
259 | |||
256 | self.cleanup_ipc_files() |
|
260 | self.cleanup_ipc_files() | |
257 |
|
261 | |||
258 | def cleanup_ipc_files(self): |
|
262 | def cleanup_ipc_files(self): | |
259 | """cleanup ipc files if we wrote them""" |
|
263 | """cleanup ipc files if we wrote them""" | |
260 | if self.transport != 'ipc': |
|
264 | if self.transport != 'ipc': | |
261 | return |
|
265 | return | |
262 | for port in (self.shell_port, self.iopub_port, self.stdin_port, self.hb_port, self.control_port): |
|
266 | for port in (self.shell_port, self.iopub_port, self.stdin_port, self.hb_port, self.control_port): | |
263 | ipcfile = "%s-%i" % (self.ip, port) |
|
267 | ipcfile = "%s-%i" % (self.ip, port) | |
264 | try: |
|
268 | try: | |
265 | os.remove(ipcfile) |
|
269 | os.remove(ipcfile) | |
266 | except (IOError, OSError): |
|
270 | except (IOError, OSError): | |
267 | pass |
|
271 | pass | |
268 |
|
272 | |||
269 | def init_connection_file(self): |
|
273 | def init_connection_file(self): | |
270 | if not self.connection_file: |
|
274 | if not self.connection_file: | |
271 | self.connection_file = "kernel-%s.json"%os.getpid() |
|
275 | self.connection_file = "kernel-%s.json"%os.getpid() | |
272 | try: |
|
276 | try: | |
273 | self.load_connection_file() |
|
277 | self.load_connection_file() | |
274 | except Exception: |
|
278 | except Exception: | |
275 | self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True) |
|
279 | self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True) | |
276 | self.exit(1) |
|
280 | self.exit(1) | |
277 |
|
281 | |||
278 | def init_sockets(self): |
|
282 | def init_sockets(self): | |
279 | # Create a context, a session, and the kernel sockets. |
|
283 | # Create a context, a session, and the kernel sockets. | |
280 | self.log.info("Starting the kernel at pid: %i", os.getpid()) |
|
284 | self.log.info("Starting the kernel at pid: %i", os.getpid()) | |
281 | context = zmq.Context.instance() |
|
285 | context = zmq.Context.instance() | |
282 | # Uncomment this to try closing the context. |
|
286 | # Uncomment this to try closing the context. | |
283 | # atexit.register(context.term) |
|
287 | # atexit.register(context.term) | |
284 |
|
288 | |||
285 | self.shell_socket = context.socket(zmq.ROUTER) |
|
289 | self.shell_socket = context.socket(zmq.ROUTER) | |
286 | self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) |
|
290 | self.shell_port = self._bind_socket(self.shell_socket, self.shell_port) | |
287 | self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) |
|
291 | self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port) | |
288 |
|
292 | |||
289 | self.iopub_socket = context.socket(zmq.PUB) |
|
293 | self.iopub_socket = context.socket(zmq.PUB) | |
290 | self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) |
|
294 | self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port) | |
291 | self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) |
|
295 | self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port) | |
292 |
|
296 | |||
293 | self.stdin_socket = context.socket(zmq.ROUTER) |
|
297 | self.stdin_socket = context.socket(zmq.ROUTER) | |
294 | self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) |
|
298 | self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port) | |
295 | self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) |
|
299 | self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port) | |
296 |
|
300 | |||
297 | self.control_socket = context.socket(zmq.ROUTER) |
|
301 | self.control_socket = context.socket(zmq.ROUTER) | |
298 | self.control_port = self._bind_socket(self.control_socket, self.control_port) |
|
302 | self.control_port = self._bind_socket(self.control_socket, self.control_port) | |
299 | self.log.debug("control ROUTER Channel on port: %i" % self.control_port) |
|
303 | self.log.debug("control ROUTER Channel on port: %i" % self.control_port) | |
300 |
|
304 | |||
301 | def init_heartbeat(self): |
|
305 | def init_heartbeat(self): | |
302 | """start the heart beating""" |
|
306 | """start the heart beating""" | |
303 | # heartbeat doesn't share context, because it mustn't be blocked |
|
307 | # heartbeat doesn't share context, because it mustn't be blocked | |
304 | # by the GIL, which is accessed by libzmq when freeing zero-copy messages |
|
308 | # by the GIL, which is accessed by libzmq when freeing zero-copy messages | |
305 | hb_ctx = zmq.Context() |
|
309 | hb_ctx = zmq.Context() | |
306 | self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) |
|
310 | self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port)) | |
307 | self.hb_port = self.heartbeat.port |
|
311 | self.hb_port = self.heartbeat.port | |
308 | self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) |
|
312 | self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port) | |
309 | self.heartbeat.start() |
|
313 | self.heartbeat.start() | |
310 |
|
314 | |||
311 | def log_connection_info(self): |
|
315 | def log_connection_info(self): | |
312 | """display connection info, and store ports""" |
|
316 | """display connection info, and store ports""" | |
313 | basename = os.path.basename(self.connection_file) |
|
317 | basename = os.path.basename(self.connection_file) | |
314 | if basename == self.connection_file or \ |
|
318 | if basename == self.connection_file or \ | |
315 | os.path.dirname(self.connection_file) == self.profile_dir.security_dir: |
|
319 | os.path.dirname(self.connection_file) == self.profile_dir.security_dir: | |
316 | # use shortname |
|
320 | # use shortname | |
317 | tail = basename |
|
321 | tail = basename | |
318 | if self.profile != 'default': |
|
322 | if self.profile != 'default': | |
319 | tail += " --profile %s" % self.profile |
|
323 | tail += " --profile %s" % self.profile | |
320 | else: |
|
324 | else: | |
321 | tail = self.connection_file |
|
325 | tail = self.connection_file | |
322 | lines = [ |
|
326 | lines = [ | |
323 | "To connect another client to this kernel, use:", |
|
327 | "To connect another client to this kernel, use:", | |
324 | " --existing %s" % tail, |
|
328 | " --existing %s" % tail, | |
325 | ] |
|
329 | ] | |
326 | # log connection info |
|
330 | # log connection info | |
327 | # info-level, so often not shown. |
|
331 | # info-level, so often not shown. | |
328 | # frontends should use the %connect_info magic |
|
332 | # frontends should use the %connect_info magic | |
329 | # to see the connection info |
|
333 | # to see the connection info | |
330 | for line in lines: |
|
334 | for line in lines: | |
331 | self.log.info(line) |
|
335 | self.log.info(line) | |
332 | # also raw print to the terminal if no parent_handle (`ipython kernel`) |
|
336 | # also raw print to the terminal if no parent_handle (`ipython kernel`) | |
333 | if not self.parent_handle: |
|
337 | if not self.parent_handle: | |
334 | for line in lines: |
|
338 | for line in lines: | |
335 | io.rprint(line) |
|
339 | io.rprint(line) | |
336 |
|
340 | |||
337 | self.ports = dict(shell=self.shell_port, iopub=self.iopub_port, |
|
341 | self.ports = dict(shell=self.shell_port, iopub=self.iopub_port, | |
338 | stdin=self.stdin_port, hb=self.hb_port, |
|
342 | stdin=self.stdin_port, hb=self.hb_port, | |
339 | control=self.control_port) |
|
343 | control=self.control_port) | |
340 |
|
344 | |||
341 | def init_session(self): |
|
345 | def init_session(self): | |
342 | """create our session object""" |
|
346 | """create our session object""" | |
343 | default_secure(self.config) |
|
347 | default_secure(self.config) | |
344 | self.session = Session(parent=self, username=u'kernel') |
|
348 | self.session = Session(parent=self, username=u'kernel') | |
345 |
|
349 | |||
346 | def init_blackhole(self): |
|
350 | def init_blackhole(self): | |
347 | """redirects stdout/stderr to devnull if necessary""" |
|
351 | """redirects stdout/stderr to devnull if necessary""" | |
348 | if self.no_stdout or self.no_stderr: |
|
352 | if self.no_stdout or self.no_stderr: | |
349 | blackhole = open(os.devnull, 'w') |
|
353 | blackhole = open(os.devnull, 'w') | |
350 | if self.no_stdout: |
|
354 | if self.no_stdout: | |
351 | sys.stdout = sys.__stdout__ = blackhole |
|
355 | sys.stdout = sys.__stdout__ = blackhole | |
352 | if self.no_stderr: |
|
356 | if self.no_stderr: | |
353 | sys.stderr = sys.__stderr__ = blackhole |
|
357 | sys.stderr = sys.__stderr__ = blackhole | |
354 |
|
358 | |||
355 | def init_io(self): |
|
359 | def init_io(self): | |
356 | """Redirect input streams and set a display hook.""" |
|
360 | """Redirect input streams and set a display hook.""" | |
357 | if self.outstream_class: |
|
361 | if self.outstream_class: | |
358 | outstream_factory = import_item(str(self.outstream_class)) |
|
362 | outstream_factory = import_item(str(self.outstream_class)) | |
359 | sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout') |
|
363 | sys.stdout = outstream_factory(self.session, self.iopub_socket, u'stdout') | |
360 | sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr') |
|
364 | sys.stderr = outstream_factory(self.session, self.iopub_socket, u'stderr') | |
361 | if self.displayhook_class: |
|
365 | if self.displayhook_class: | |
362 | displayhook_factory = import_item(str(self.displayhook_class)) |
|
366 | displayhook_factory = import_item(str(self.displayhook_class)) | |
363 | sys.displayhook = displayhook_factory(self.session, self.iopub_socket) |
|
367 | sys.displayhook = displayhook_factory(self.session, self.iopub_socket) | |
364 |
|
368 | |||
365 | def init_signal(self): |
|
369 | def init_signal(self): | |
366 | signal.signal(signal.SIGINT, signal.SIG_IGN) |
|
370 | signal.signal(signal.SIGINT, signal.SIG_IGN) | |
367 |
|
371 | |||
368 | def init_kernel(self): |
|
372 | def init_kernel(self): | |
369 | """Create the Kernel object itself""" |
|
373 | """Create the Kernel object itself""" | |
370 | shell_stream = ZMQStream(self.shell_socket) |
|
374 | shell_stream = ZMQStream(self.shell_socket) | |
371 | control_stream = ZMQStream(self.control_socket) |
|
375 | control_stream = ZMQStream(self.control_socket) | |
372 |
|
376 | |||
373 | kernel_factory = import_item(str(self.kernel_class)) |
|
377 | kernel_factory = import_item(str(self.kernel_class)) | |
374 |
|
378 | |||
375 | kernel = kernel_factory(parent=self, session=self.session, |
|
379 | kernel = kernel_factory(parent=self, session=self.session, | |
376 | shell_streams=[shell_stream, control_stream], |
|
380 | shell_streams=[shell_stream, control_stream], | |
377 | iopub_socket=self.iopub_socket, |
|
381 | iopub_socket=self.iopub_socket, | |
378 | stdin_socket=self.stdin_socket, |
|
382 | stdin_socket=self.stdin_socket, | |
379 | log=self.log, |
|
383 | log=self.log, | |
380 | profile_dir=self.profile_dir, |
|
384 | profile_dir=self.profile_dir, | |
381 | ) |
|
385 | ) | |
382 | kernel.record_ports(self.ports) |
|
386 | kernel.record_ports(self.ports) | |
383 | self.kernel = kernel |
|
387 | self.kernel = kernel | |
384 |
|
388 | |||
385 | def init_gui_pylab(self): |
|
389 | def init_gui_pylab(self): | |
386 | """Enable GUI event loop integration, taking pylab into account.""" |
|
390 | """Enable GUI event loop integration, taking pylab into account.""" | |
387 |
|
391 | |||
388 | # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` |
|
392 | # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab` | |
389 | # to ensure that any exception is printed straight to stderr. |
|
393 | # to ensure that any exception is printed straight to stderr. | |
390 | # Normally _showtraceback associates the reply with an execution, |
|
394 | # Normally _showtraceback associates the reply with an execution, | |
391 | # which means frontends will never draw it, as this exception |
|
395 | # which means frontends will never draw it, as this exception | |
392 | # is not associated with any execute request. |
|
396 | # is not associated with any execute request. | |
393 |
|
397 | |||
394 | shell = self.shell |
|
398 | shell = self.shell | |
395 | _showtraceback = shell._showtraceback |
|
399 | _showtraceback = shell._showtraceback | |
396 | try: |
|
400 | try: | |
397 | # replace pyerr-sending traceback with stderr |
|
401 | # replace pyerr-sending traceback with stderr | |
398 | def print_tb(etype, evalue, stb): |
|
402 | def print_tb(etype, evalue, stb): | |
399 | print ("GUI event loop or pylab initialization failed", |
|
403 | print ("GUI event loop or pylab initialization failed", | |
400 | file=io.stderr) |
|
404 | file=io.stderr) | |
401 | print (shell.InteractiveTB.stb2text(stb), file=io.stderr) |
|
405 | print (shell.InteractiveTB.stb2text(stb), file=io.stderr) | |
402 | shell._showtraceback = print_tb |
|
406 | shell._showtraceback = print_tb | |
403 | InteractiveShellApp.init_gui_pylab(self) |
|
407 | InteractiveShellApp.init_gui_pylab(self) | |
404 | finally: |
|
408 | finally: | |
405 | shell._showtraceback = _showtraceback |
|
409 | shell._showtraceback = _showtraceback | |
406 |
|
410 | |||
407 | def init_shell(self): |
|
411 | def init_shell(self): | |
408 | self.shell = self.kernel.shell |
|
412 | self.shell = self.kernel.shell | |
409 | self.shell.configurables.append(self) |
|
413 | self.shell.configurables.append(self) | |
410 |
|
414 | |||
411 | @catch_config_error |
|
415 | @catch_config_error | |
412 | def initialize(self, argv=None): |
|
416 | def initialize(self, argv=None): | |
413 | super(IPKernelApp, self).initialize(argv) |
|
417 | super(IPKernelApp, self).initialize(argv) | |
414 | self.init_blackhole() |
|
418 | self.init_blackhole() | |
415 | self.init_connection_file() |
|
419 | self.init_connection_file() | |
416 | self.init_session() |
|
420 | self.init_session() | |
417 | self.init_poller() |
|
421 | self.init_poller() | |
418 | self.init_sockets() |
|
422 | self.init_sockets() | |
419 | self.init_heartbeat() |
|
423 | self.init_heartbeat() | |
420 | # writing/displaying connection info must be *after* init_sockets/heartbeat |
|
424 | # writing/displaying connection info must be *after* init_sockets/heartbeat | |
421 | self.log_connection_info() |
|
425 | self.log_connection_info() | |
422 | self.write_connection_file() |
|
426 | self.write_connection_file() | |
423 | self.init_io() |
|
427 | self.init_io() | |
424 | self.init_signal() |
|
428 | self.init_signal() | |
425 | self.init_kernel() |
|
429 | self.init_kernel() | |
426 | # shell init steps |
|
430 | # shell init steps | |
427 | self.init_path() |
|
431 | self.init_path() | |
428 | self.init_shell() |
|
432 | self.init_shell() | |
429 | self.init_gui_pylab() |
|
433 | self.init_gui_pylab() | |
430 | self.init_extensions() |
|
434 | self.init_extensions() | |
431 | self.init_code() |
|
435 | self.init_code() | |
432 | # flush stdout/stderr, so that anything written to these streams during |
|
436 | # flush stdout/stderr, so that anything written to these streams during | |
433 | # initialization does not get associated with the first execution request |
|
437 | # initialization does not get associated with the first execution request | |
434 | sys.stdout.flush() |
|
438 | sys.stdout.flush() | |
435 | sys.stderr.flush() |
|
439 | sys.stderr.flush() | |
436 |
|
440 | |||
437 | def start(self): |
|
441 | def start(self): | |
438 | if self.poller is not None: |
|
442 | if self.poller is not None: | |
439 | self.poller.start() |
|
443 | self.poller.start() | |
440 | self.kernel.start() |
|
444 | self.kernel.start() | |
441 | try: |
|
445 | try: | |
442 | ioloop.IOLoop.instance().start() |
|
446 | ioloop.IOLoop.instance().start() | |
443 | except KeyboardInterrupt: |
|
447 | except KeyboardInterrupt: | |
444 | pass |
|
448 | pass | |
445 |
|
449 | |||
446 |
|
450 | |||
447 | def main(): |
|
451 | def main(): | |
448 | """Run an IPKernel as an application""" |
|
452 | """Run an IPKernel as an application""" | |
449 | app = IPKernelApp.instance() |
|
453 | app = IPKernelApp.instance() | |
450 | app.initialize() |
|
454 | app.initialize() | |
451 | app.start() |
|
455 | app.start() | |
452 |
|
456 | |||
453 |
|
457 | |||
454 | if __name__ == '__main__': |
|
458 | if __name__ == '__main__': | |
455 | main() |
|
459 | main() |
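For reference, the connection file that write_connection_file() emits above (and that load_connection_file() reads back) is a small JSON document carrying the transport, IP, per-channel ports and HMAC key. A minimal sketch with invented values; the filename and port numbers are illustrative only:

    import json

    # Every key below mirrors an argument passed to write_connection_file();
    # the values are made up for this example.
    connection_info = {
        "transport": "tcp",
        "ip": "127.0.0.1",
        "shell_port": 53001,
        "iopub_port": 53002,
        "stdin_port": 53003,
        "control_port": 53004,
        "hb_port": 53005,
        "key": "00000000-0000-0000-0000-000000000000",
    }
    with open("kernel-12345.json", "w") as f:  # name follows the "kernel-%s.json" % pid pattern
        json.dump(connection_info, f, indent=2)

A second frontend can then attach to the running kernel with the --existing flag advertised by log_connection_info(), e.g. `ipython console --existing kernel-12345.json`, assuming the file sits in the current directory or the profile's security directory (the two places load_connection_file() looks).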
@@ -1,615 +1,610 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 | """ |
|
3 | """ | |
4 | The ipcluster application. |
|
4 | The ipcluster application. | |
5 |
|
5 | |||
6 | Authors: |
|
6 | Authors: | |
7 |
|
7 | |||
8 | * Brian Granger |
|
8 | * Brian Granger | |
9 | * MinRK |
|
9 | * MinRK | |
10 |
|
10 | |||
11 | """ |
|
11 | """ | |
12 |
|
12 | |||
13 | #----------------------------------------------------------------------------- |
|
13 | #----------------------------------------------------------------------------- | |
14 | # Copyright (C) 2008-2011 The IPython Development Team |
|
14 | # Copyright (C) 2008-2011 The IPython Development Team | |
15 | # |
|
15 | # | |
16 | # Distributed under the terms of the BSD License. The full license is in |
|
16 | # Distributed under the terms of the BSD License. The full license is in | |
17 | # the file COPYING, distributed as part of this software. |
|
17 | # the file COPYING, distributed as part of this software. | |
18 | #----------------------------------------------------------------------------- |
|
18 | #----------------------------------------------------------------------------- | |
19 |
|
19 | |||
20 | #----------------------------------------------------------------------------- |
|
20 | #----------------------------------------------------------------------------- | |
21 | # Imports |
|
21 | # Imports | |
22 | #----------------------------------------------------------------------------- |
|
22 | #----------------------------------------------------------------------------- | |
23 |
|
23 | |||
24 | import errno |
|
24 | import errno | |
25 | import logging |
|
25 | import logging | |
26 | import os |
|
26 | import os | |
27 | import re |
|
27 | import re | |
28 | import signal |
|
28 | import signal | |
29 |
|
29 | |||
30 | from subprocess import check_call, CalledProcessError, PIPE |
|
30 | from subprocess import check_call, CalledProcessError, PIPE | |
31 | import zmq |
|
31 | import zmq | |
32 | from zmq.eventloop import ioloop |
|
32 | from zmq.eventloop import ioloop | |
33 |
|
33 | |||
34 | from IPython.config.application import Application, boolean_flag, catch_config_error |
|
34 | from IPython.config.application import Application, boolean_flag, catch_config_error | |
35 | from IPython.config.loader import Config |
|
35 | from IPython.config.loader import Config | |
36 | from IPython.core.application import BaseIPythonApplication |
|
36 | from IPython.core.application import BaseIPythonApplication | |
37 | from IPython.core.profiledir import ProfileDir |
|
37 | from IPython.core.profiledir import ProfileDir | |
38 | from IPython.utils.daemonize import daemonize |
|
38 | from IPython.utils.daemonize import daemonize | |
39 | from IPython.utils.importstring import import_item |
|
39 | from IPython.utils.importstring import import_item | |
40 | from IPython.utils.sysinfo import num_cpus |
|
40 | from IPython.utils.sysinfo import num_cpus | |
41 | from IPython.utils.traitlets import (Integer, Unicode, Bool, CFloat, Dict, List, Any, |
|
41 | from IPython.utils.traitlets import (Integer, Unicode, Bool, CFloat, Dict, List, Any, | |
42 | DottedObjectName) |
|
42 | DottedObjectName) | |
43 |
|
43 | |||
44 | from IPython.parallel.apps.baseapp import ( |
|
44 | from IPython.parallel.apps.baseapp import ( | |
45 | BaseParallelApplication, |
|
45 | BaseParallelApplication, | |
46 | PIDFileError, |
|
46 | PIDFileError, | |
47 | base_flags, base_aliases |
|
47 | base_flags, base_aliases | |
48 | ) |
|
48 | ) | |
49 |
|
49 | |||
50 |
|
50 | |||
51 | #----------------------------------------------------------------------------- |
|
51 | #----------------------------------------------------------------------------- | |
52 | # Module level variables |
|
52 | # Module level variables | |
53 | #----------------------------------------------------------------------------- |
|
53 | #----------------------------------------------------------------------------- | |
54 |
|
54 | |||
55 |
|
55 | |||
56 | default_config_file_name = u'ipcluster_config.py' |
|
|||
57 |
|
||||
58 |
|
||||
59 | _description = """Start an IPython cluster for parallel computing. |
|
56 | _description = """Start an IPython cluster for parallel computing. | |
60 |
|
57 | |||
61 | An IPython cluster consists of 1 controller and 1 or more engines. |
|
58 | An IPython cluster consists of 1 controller and 1 or more engines. | |
62 | This command automates the startup of these processes using a wide range of |
|
59 | This command automates the startup of these processes using a wide range of | |
63 | startup methods (SSH, local processes, PBS, mpiexec, SGE, LSF, HTCondor, |
|
60 | startup methods (SSH, local processes, PBS, mpiexec, SGE, LSF, HTCondor, | |
64 | Windows HPC Server 2008). To start a cluster with 4 engines on your |
|
61 | Windows HPC Server 2008). To start a cluster with 4 engines on your | |
65 | local host simply do 'ipcluster start --n=4'. For more complex usage |
|
62 | local host simply do 'ipcluster start --n=4'. For more complex usage | |
66 | you will typically do 'ipython profile create mycluster --parallel', then edit |
|
63 | you will typically do 'ipython profile create mycluster --parallel', then edit | |
67 | configuration files, followed by 'ipcluster start --profile=mycluster --n=4'. |
|
64 | configuration files, followed by 'ipcluster start --profile=mycluster --n=4'. | |
68 | """ |
|
65 | """ | |
69 |
|
66 | |||
70 | _main_examples = """ |
|
67 | _main_examples = """ | |
71 | ipcluster start --n=4 # start a 4 node cluster on localhost |
|
68 | ipcluster start --n=4 # start a 4 node cluster on localhost | |
72 | ipcluster start -h # show the help string for the start subcmd |
|
69 | ipcluster start -h # show the help string for the start subcmd | |
73 |
|
70 | |||
74 | ipcluster stop -h # show the help string for the stop subcmd |
|
71 | ipcluster stop -h # show the help string for the stop subcmd | |
75 | ipcluster engines -h # show the help string for the engines subcmd |
|
72 | ipcluster engines -h # show the help string for the engines subcmd | |
76 | """ |
|
73 | """ | |
77 |
|
74 | |||
78 | _start_examples = """ |
|
75 | _start_examples = """ | |
79 | ipython profile create mycluster --parallel # create mycluster profile |
|
76 | ipython profile create mycluster --parallel # create mycluster profile | |
80 | ipcluster start --profile=mycluster --n=4 # start mycluster with 4 nodes |
|
77 | ipcluster start --profile=mycluster --n=4 # start mycluster with 4 nodes | |
81 | """ |
|
78 | """ | |
82 |
|
79 | |||
83 | _stop_examples = """ |
|
80 | _stop_examples = """ | |
84 | ipcluster stop --profile=mycluster # stop a running cluster by profile name |
|
81 | ipcluster stop --profile=mycluster # stop a running cluster by profile name | |
85 | """ |
|
82 | """ | |
86 |
|
83 | |||
87 | _engines_examples = """ |
|
84 | _engines_examples = """ | |
88 | ipcluster engines --profile=mycluster --n=4 # start 4 engines only |
|
85 | ipcluster engines --profile=mycluster --n=4 # start 4 engines only | |
89 | """ |
|
86 | """ | |
90 |
|
87 | |||
91 |
|
88 | |||
92 | # Exit codes for ipcluster |
|
89 | # Exit codes for ipcluster | |
93 |
|
90 | |||
94 | # This will be the exit code if the ipcluster appears to be running because |
|
91 | # This will be the exit code if the ipcluster appears to be running because | |
95 | # a .pid file exists |
|
92 | # a .pid file exists | |
96 | ALREADY_STARTED = 10 |
|
93 | ALREADY_STARTED = 10 | |
97 |
|
94 | |||
98 |
|
95 | |||
99 | # This will be the exit code if ipcluster stop is run, but there is no .pid |
|
96 | # This will be the exit code if ipcluster stop is run, but there is no .pid | |
100 | # file to be found. |
|
97 | # file to be found. | |
101 | ALREADY_STOPPED = 11 |
|
98 | ALREADY_STOPPED = 11 | |
102 |
|
99 | |||
103 | # This will be the exit code if ipcluster engines is run, but there is no .pid |
|
100 | # This will be the exit code if ipcluster engines is run, but there is no .pid | |
104 | # file to be found. |
|
101 | # file to be found. | |
105 | NO_CLUSTER = 12 |
|
102 | NO_CLUSTER = 12 | |
106 |
|
103 | |||
107 |
|
104 | |||
108 | #----------------------------------------------------------------------------- |
|
105 | #----------------------------------------------------------------------------- | |
109 | # Utilities |
|
106 | # Utilities | |
110 | #----------------------------------------------------------------------------- |
|
107 | #----------------------------------------------------------------------------- | |
111 |
|
108 | |||
112 | def find_launcher_class(clsname, kind): |
|
109 | def find_launcher_class(clsname, kind): | |
113 | """Return a launcher for a given clsname and kind. |
|
110 | """Return a launcher for a given clsname and kind. | |
114 |
|
111 | |||
115 | Parameters |
|
112 | Parameters | |
116 | ========== |
|
113 | ========== | |
117 | clsname : str |
|
114 | clsname : str | |
118 | The full name of the launcher class, either with or without the |
|
115 | The full name of the launcher class, either with or without the | |
119 | module path, or an abbreviation (MPI, SSH, SGE, PBS, LSF, HTCondor, |
|
116 | module path, or an abbreviation (MPI, SSH, SGE, PBS, LSF, HTCondor, | |
120 | WindowsHPC). |
|
117 | WindowsHPC). | |
121 | kind : str |
|
118 | kind : str | |
122 | Either 'EngineSet' or 'Controller'. |
|
119 | Either 'EngineSet' or 'Controller'. | |
123 | """ |
|
120 | """ | |
124 | if '.' not in clsname: |
|
121 | if '.' not in clsname: | |
125 | # not a module, presume it's the raw name in apps.launcher |
|
122 | # not a module, presume it's the raw name in apps.launcher | |
126 | if kind and kind not in clsname: |
|
123 | if kind and kind not in clsname: | |
127 | # doesn't match necessary full class name, assume it's |
|
124 | # doesn't match necessary full class name, assume it's | |
128 | # just 'PBS' or 'MPI' etc prefix: |
|
125 | # just 'PBS' or 'MPI' etc prefix: | |
129 | clsname = clsname + kind + 'Launcher' |
|
126 | clsname = clsname + kind + 'Launcher' | |
130 | clsname = 'IPython.parallel.apps.launcher.'+clsname |
|
127 | clsname = 'IPython.parallel.apps.launcher.'+clsname | |
131 | klass = import_item(clsname) |
|
128 | klass = import_item(clsname) | |
132 | return klass |
|
129 | return klass | |
133 |
|
130 | |||
134 | #----------------------------------------------------------------------------- |
|
131 | #----------------------------------------------------------------------------- | |
135 | # Main application |
|
132 | # Main application | |
136 | #----------------------------------------------------------------------------- |
|
133 | #----------------------------------------------------------------------------- | |
137 |
|
134 | |||
138 | start_help = """Start an IPython cluster for parallel computing |
|
135 | start_help = """Start an IPython cluster for parallel computing | |
139 |
|
136 | |||
140 | Start an ipython cluster by its profile name or cluster |
|
137 | Start an ipython cluster by its profile name or cluster | |
141 | directory. Cluster directories contain configuration, log and |
|
138 | directory. Cluster directories contain configuration, log and | |
142 | security related files and are named using the convention |
|
139 | security related files and are named using the convention | |
143 | 'profile_<name>' and should be created using the 'start' |
|
140 | 'profile_<name>' and should be created using the 'start' | |
144 | subcommand of 'ipcluster'. If your cluster directory is in |
|
141 | subcommand of 'ipcluster'. If your cluster directory is in | |
145 | the cwd or the ipython directory, you can simply refer to it |
|
142 | the cwd or the ipython directory, you can simply refer to it | |
146 | using its profile name, 'ipcluster start --n=4 --profile=<profile>', |
|
143 | using its profile name, 'ipcluster start --n=4 --profile=<profile>', | |
147 | otherwise use the 'profile-dir' option. |
|
144 | otherwise use the 'profile-dir' option. | |
148 | """ |
|
145 | """ | |
149 | stop_help = """Stop a running IPython cluster |
|
146 | stop_help = """Stop a running IPython cluster | |
150 |
|
147 | |||
151 | Stop a running ipython cluster by its profile name or cluster |
|
148 | Stop a running ipython cluster by its profile name or cluster | |
152 | directory. Cluster directories are named using the convention |
|
149 | directory. Cluster directories are named using the convention | |
153 | 'profile_<name>'. If your cluster directory is in |
|
150 | 'profile_<name>'. If your cluster directory is in | |
154 | the cwd or the ipython directory, you can simply refer to it |
|
151 | the cwd or the ipython directory, you can simply refer to it | |
155 | using its profile name, 'ipcluster stop --profile=<profile>', otherwise |
|
152 | using its profile name, 'ipcluster stop --profile=<profile>', otherwise | |
156 | use the '--profile-dir' option. |
|
153 | use the '--profile-dir' option. | |
157 | """ |
|
154 | """ | |
158 | engines_help = """Start engines connected to an existing IPython cluster |
|
155 | engines_help = """Start engines connected to an existing IPython cluster | |
159 |
|
156 | |||
160 | Start one or more engines to connect to an existing Cluster |
|
157 | Start one or more engines to connect to an existing Cluster | |
161 | by profile name or cluster directory. |
|
158 | by profile name or cluster directory. | |
162 | Cluster directories contain configuration, log and |
|
159 | Cluster directories contain configuration, log and | |
163 | security related files and are named using the convention |
|
160 | security related files and are named using the convention | |
164 | 'profile_<name>' and should be created using the 'start' |
|
161 | 'profile_<name>' and should be created using the 'start' | |
165 | subcommand of 'ipcluster'. If your cluster directory is in |
|
162 | subcommand of 'ipcluster'. If your cluster directory is in | |
166 | the cwd or the ipython directory, you can simply refer to it |
|
163 | the cwd or the ipython directory, you can simply refer to it | |
167 | using its profile name, 'ipcluster engines --n=4 --profile=<profile>', |
|
164 | using its profile name, 'ipcluster engines --n=4 --profile=<profile>', | |
168 | otherwise use the 'profile-dir' option. |
|
165 | otherwise use the 'profile-dir' option. | |
169 | """ |
|
166 | """ | |
170 | stop_aliases = dict( |
|
167 | stop_aliases = dict( | |
171 | signal='IPClusterStop.signal', |
|
168 | signal='IPClusterStop.signal', | |
172 | ) |
|
169 | ) | |
173 | stop_aliases.update(base_aliases) |
|
170 | stop_aliases.update(base_aliases) | |
174 |
|
171 | |||
175 | class IPClusterStop(BaseParallelApplication): |
|
172 | class IPClusterStop(BaseParallelApplication): | |
176 | name = u'ipcluster' |
|
173 | name = u'ipcluster' | |
177 | description = stop_help |
|
174 | description = stop_help | |
178 | examples = _stop_examples |
|
175 | examples = _stop_examples | |
179 | config_file_name = Unicode(default_config_file_name) |
|
|||
180 |
|
176 | |||
181 | signal = Integer(signal.SIGINT, config=True, |
|
177 | signal = Integer(signal.SIGINT, config=True, | |
182 | help="signal to use for stopping processes.") |
|
178 | help="signal to use for stopping processes.") | |
183 |
|
179 | |||
184 | aliases = Dict(stop_aliases) |
|
180 | aliases = Dict(stop_aliases) | |
185 |
|
181 | |||
186 | def start(self): |
|
182 | def start(self): | |
187 | """Start the app for the stop subcommand.""" |
|
183 | """Start the app for the stop subcommand.""" | |
188 | try: |
|
184 | try: | |
189 | pid = self.get_pid_from_file() |
|
185 | pid = self.get_pid_from_file() | |
190 | except PIDFileError: |
|
186 | except PIDFileError: | |
191 | self.log.critical( |
|
187 | self.log.critical( | |
192 | 'Could not read pid file, cluster is probably not running.' |
|
188 | 'Could not read pid file, cluster is probably not running.' | |
193 | ) |
|
189 | ) | |
194 | # Here I exit with an unusual exit status that other processes |
|
190 | # Here I exit with an unusual exit status that other processes | |
195 | # can watch for to learn how I exited. |
|
191 | # can watch for to learn how I exited. | |
196 | self.remove_pid_file() |
|
192 | self.remove_pid_file() | |
197 | self.exit(ALREADY_STOPPED) |
|
193 | self.exit(ALREADY_STOPPED) | |
198 |
|
194 | |||
199 | if not self.check_pid(pid): |
|
195 | if not self.check_pid(pid): | |
200 | self.log.critical( |
|
196 | self.log.critical( | |
201 | 'Cluster [pid=%r] is not running.' % pid |
|
197 | 'Cluster [pid=%r] is not running.' % pid | |
202 | ) |
|
198 | ) | |
203 | self.remove_pid_file() |
|
199 | self.remove_pid_file() | |
204 | # Here I exit with an unusual exit status that other processes |
|
200 | # Here I exit with an unusual exit status that other processes | |
205 | # can watch for to learn how I exited. |
|
201 | # can watch for to learn how I exited. | |
206 | self.exit(ALREADY_STOPPED) |
|
202 | self.exit(ALREADY_STOPPED) | |
207 |
|
203 | |||
208 | elif os.name=='posix': |
|
204 | elif os.name=='posix': | |
209 | sig = self.signal |
|
205 | sig = self.signal | |
210 | self.log.info( |
|
206 | self.log.info( | |
211 | "Stopping cluster [pid=%r] with [signal=%r]" % (pid, sig) |
|
207 | "Stopping cluster [pid=%r] with [signal=%r]" % (pid, sig) | |
212 | ) |
|
208 | ) | |
213 | try: |
|
209 | try: | |
214 | os.kill(pid, sig) |
|
210 | os.kill(pid, sig) | |
215 | except OSError: |
|
211 | except OSError: | |
216 | self.log.error("Stopping cluster failed, assuming already dead.", |
|
212 | self.log.error("Stopping cluster failed, assuming already dead.", | |
217 | exc_info=True) |
|
213 | exc_info=True) | |
218 | self.remove_pid_file() |
|
214 | self.remove_pid_file() | |
219 | elif os.name=='nt': |
|
215 | elif os.name=='nt': | |
220 | try: |
|
216 | try: | |
221 | # kill the whole tree |
|
217 | # kill the whole tree | |
222 | p = check_call(['taskkill', '-pid', str(pid), '-t', '-f'], stdout=PIPE,stderr=PIPE) |
|
218 | p = check_call(['taskkill', '-pid', str(pid), '-t', '-f'], stdout=PIPE,stderr=PIPE) | |
223 | except (CalledProcessError, OSError): |
|
219 | except (CalledProcessError, OSError): | |
224 | self.log.error("Stopping cluster failed, assuming already dead.", |
|
220 | self.log.error("Stopping cluster failed, assuming already dead.", | |
225 | exc_info=True) |
|
221 | exc_info=True) | |
226 | self.remove_pid_file() |
|
222 | self.remove_pid_file() | |
227 |
|
223 | |||
228 | engine_aliases = {} |
|
224 | engine_aliases = {} | |
229 | engine_aliases.update(base_aliases) |
|
225 | engine_aliases.update(base_aliases) | |
230 | engine_aliases.update(dict( |
|
226 | engine_aliases.update(dict( | |
231 | n='IPClusterEngines.n', |
|
227 | n='IPClusterEngines.n', | |
232 | engines = 'IPClusterEngines.engine_launcher_class', |
|
228 | engines = 'IPClusterEngines.engine_launcher_class', | |
233 | daemonize = 'IPClusterEngines.daemonize', |
|
229 | daemonize = 'IPClusterEngines.daemonize', | |
234 | )) |
|
230 | )) | |
235 | engine_flags = {} |
|
231 | engine_flags = {} | |
236 | engine_flags.update(base_flags) |
|
232 | engine_flags.update(base_flags) | |
237 |
|
233 | |||
238 | engine_flags.update(dict( |
|
234 | engine_flags.update(dict( | |
239 | daemonize=( |
|
235 | daemonize=( | |
240 | {'IPClusterEngines' : {'daemonize' : True}}, |
|
236 | {'IPClusterEngines' : {'daemonize' : True}}, | |
241 | """run the cluster into the background (not available on Windows)""", |
|
237 | """run the cluster into the background (not available on Windows)""", | |
242 | ) |
|
238 | ) | |
243 | )) |
|
239 | )) | |
244 | class IPClusterEngines(BaseParallelApplication): |
|
240 | class IPClusterEngines(BaseParallelApplication): | |
245 |
|
241 | |||
246 | name = u'ipcluster' |
|
242 | name = u'ipcluster' | |
247 | description = engines_help |
|
243 | description = engines_help | |
248 | examples = _engines_examples |
|
244 | examples = _engines_examples | |
249 | usage = None |
|
245 | usage = None | |
250 | config_file_name = Unicode(default_config_file_name) |
|
|||
251 | default_log_level = logging.INFO |
|
246 | default_log_level = logging.INFO | |
252 | classes = List() |
|
247 | classes = List() | |
253 | def _classes_default(self): |
|
248 | def _classes_default(self): | |
254 | from IPython.parallel.apps import launcher |
|
249 | from IPython.parallel.apps import launcher | |
255 | launchers = launcher.all_launchers |
|
250 | launchers = launcher.all_launchers | |
256 | eslaunchers = [ l for l in launchers if 'EngineSet' in l.__name__] |
|
251 | eslaunchers = [ l for l in launchers if 'EngineSet' in l.__name__] | |
257 | return [ProfileDir]+eslaunchers |
|
252 | return [ProfileDir]+eslaunchers | |
258 |
|
253 | |||
259 | n = Integer(num_cpus(), config=True, |
|
254 | n = Integer(num_cpus(), config=True, | |
260 | help="""The number of engines to start. The default is to use one for each |
|
255 | help="""The number of engines to start. The default is to use one for each | |
261 | CPU on your machine""") |
|
256 | CPU on your machine""") | |
262 |
|
257 | |||
263 | engine_launcher = Any(config=True, help="Deprecated, use engine_launcher_class") |
|
258 | engine_launcher = Any(config=True, help="Deprecated, use engine_launcher_class") | |
264 | def _engine_launcher_changed(self, name, old, new): |
|
259 | def _engine_launcher_changed(self, name, old, new): | |
265 | if isinstance(new, basestring): |
|
260 | if isinstance(new, basestring): | |
266 | self.log.warn("WARNING: %s.engine_launcher is deprecated as of 0.12," |
|
261 | self.log.warn("WARNING: %s.engine_launcher is deprecated as of 0.12," | |
267 | " use engine_launcher_class" % self.__class__.__name__) |
|
262 | " use engine_launcher_class" % self.__class__.__name__) | |
268 | self.engine_launcher_class = new |
|
263 | self.engine_launcher_class = new | |
269 | engine_launcher_class = DottedObjectName('LocalEngineSetLauncher', |
|
264 | engine_launcher_class = DottedObjectName('LocalEngineSetLauncher', | |
270 | config=True, |
|
265 | config=True, | |
271 | help="""The class for launching a set of Engines. Change this value |
|
266 | help="""The class for launching a set of Engines. Change this value | |
272 | to use various batch systems to launch your engines, such as PBS, SGE, MPI, etc. |
|
267 | to use various batch systems to launch your engines, such as PBS, SGE, MPI, etc. | |
273 | Each launcher class has its own set of configuration options, for making sure |
|
268 | Each launcher class has its own set of configuration options, for making sure | |
274 | it will work in your environment. |
|
269 | it will work in your environment. | |
275 |
|
270 | |||
276 | You can also write your own launcher, and specify its absolute import path, |
|
271 | You can also write your own launcher, and specify its absolute import path, | |
277 | as in 'mymodule.launcher.FTLEnginesLauncher'. |
|
272 | as in 'mymodule.launcher.FTLEnginesLauncher'. | |
278 |
|
273 | |||
279 | IPython's bundled examples include: |
|
274 | IPython's bundled examples include: | |
280 |
|
275 | |||
281 | Local : start engines locally as subprocesses [default] |
|
276 | Local : start engines locally as subprocesses [default] | |
282 | MPI : use mpiexec to launch engines in an MPI environment |
|
277 | MPI : use mpiexec to launch engines in an MPI environment | |
283 | PBS : use PBS (qsub) to submit engines to a batch queue |
|
278 | PBS : use PBS (qsub) to submit engines to a batch queue | |
284 | SGE : use SGE (qsub) to submit engines to a batch queue |
|
279 | SGE : use SGE (qsub) to submit engines to a batch queue | |
285 | LSF : use LSF (bsub) to submit engines to a batch queue |
|
280 | LSF : use LSF (bsub) to submit engines to a batch queue | |
286 | SSH : use SSH to start the controller |
|
281 | SSH : use SSH to start the controller | |
287 | Note that SSH does *not* move the connection files |
|
282 | Note that SSH does *not* move the connection files | |
288 | around, so you will likely have to do this manually |
|
283 | around, so you will likely have to do this manually | |
289 | unless the machines are on a shared file system. |
|
284 | unless the machines are on a shared file system. | |
290 | HTCondor : use HTCondor to submit engines to a batch queue |
|
285 | HTCondor : use HTCondor to submit engines to a batch queue | |
291 | WindowsHPC : use Windows HPC |
|
286 | WindowsHPC : use Windows HPC | |
292 |
|
287 | |||
293 | If you are using one of IPython's builtin launchers, you can specify just the |
|
288 | If you are using one of IPython's builtin launchers, you can specify just the | |
294 | prefix, e.g.: |
|
289 | prefix, e.g.: | |
295 |
|
290 | |||
296 | c.IPClusterEngines.engine_launcher_class = 'SSH' |
|
291 | c.IPClusterEngines.engine_launcher_class = 'SSH' | |
297 |
|
292 | |||
298 | or: |
|
293 | or: | |
299 |
|
294 | |||
300 | ipcluster start --engines=MPI |
|
295 | ipcluster start --engines=MPI | |
301 |
|
296 | |||
302 | """ |
|
297 | """ | |
303 | ) |
|
298 | ) | |
304 | daemonize = Bool(False, config=True, |
|
299 | daemonize = Bool(False, config=True, | |
305 | help="""Daemonize the ipcluster program. This implies --log-to-file. |
|
300 | help="""Daemonize the ipcluster program. This implies --log-to-file. | |
306 | Not available on Windows. |
|
301 | Not available on Windows. | |
307 | """) |
|
302 | """) | |
308 |
|
303 | |||
309 | def _daemonize_changed(self, name, old, new): |
|
304 | def _daemonize_changed(self, name, old, new): | |
310 | if new: |
|
305 | if new: | |
311 | self.log_to_file = True |
|
306 | self.log_to_file = True | |
312 |
|
307 | |||
313 | early_shutdown = Integer(30, config=True, help="The timeout (in seconds)") |
|
308 | early_shutdown = Integer(30, config=True, help="The timeout (in seconds)") | |
314 | _stopping = False |
|
309 | _stopping = False | |
315 |
|
310 | |||
316 | aliases = Dict(engine_aliases) |
|
311 | aliases = Dict(engine_aliases) | |
317 | flags = Dict(engine_flags) |
|
312 | flags = Dict(engine_flags) | |
318 |
|
313 | |||
319 | @catch_config_error |
|
314 | @catch_config_error | |
320 | def initialize(self, argv=None): |
|
315 | def initialize(self, argv=None): | |
321 | super(IPClusterEngines, self).initialize(argv) |
|
316 | super(IPClusterEngines, self).initialize(argv) | |
322 | self.init_signal() |
|
317 | self.init_signal() | |
323 | self.init_launchers() |
|
318 | self.init_launchers() | |
324 |
|
319 | |||
325 | def init_launchers(self): |
|
320 | def init_launchers(self): | |
326 | self.engine_launcher = self.build_launcher(self.engine_launcher_class, 'EngineSet') |
|
321 | self.engine_launcher = self.build_launcher(self.engine_launcher_class, 'EngineSet') | |
327 |
|
322 | |||
328 | def init_signal(self): |
|
323 | def init_signal(self): | |
329 | # Setup signals |
|
324 | # Setup signals | |
330 | signal.signal(signal.SIGINT, self.sigint_handler) |
|
325 | signal.signal(signal.SIGINT, self.sigint_handler) | |
331 |
|
326 | |||
332 | def build_launcher(self, clsname, kind=None): |
|
327 | def build_launcher(self, clsname, kind=None): | |
333 | """import and instantiate a Launcher based on importstring""" |
|
328 | """import and instantiate a Launcher based on importstring""" | |
334 | try: |
|
329 | try: | |
335 | klass = find_launcher_class(clsname, kind) |
|
330 | klass = find_launcher_class(clsname, kind) | |
336 | except (ImportError, KeyError): |
|
331 | except (ImportError, KeyError): | |
337 | self.log.fatal("Could not import launcher class: %r"%clsname) |
|
332 | self.log.fatal("Could not import launcher class: %r"%clsname) | |
338 | self.exit(1) |
|
333 | self.exit(1) | |
339 |
|
334 | |||
340 | launcher = klass( |
|
335 | launcher = klass( | |
341 | work_dir=u'.', parent=self, log=self.log, |
|
336 | work_dir=u'.', parent=self, log=self.log, | |
342 | profile_dir=self.profile_dir.location, cluster_id=self.cluster_id, |
|
337 | profile_dir=self.profile_dir.location, cluster_id=self.cluster_id, | |
343 | ) |
|
338 | ) | |
344 | return launcher |
|
339 | return launcher | |
345 |
|
340 | |||
346 | def engines_started_ok(self): |
|
341 | def engines_started_ok(self): | |
347 | self.log.info("Engines appear to have started successfully") |
|
342 | self.log.info("Engines appear to have started successfully") | |
348 | self.early_shutdown = 0 |
|
343 | self.early_shutdown = 0 | |
349 |
|
344 | |||
350 | def start_engines(self): |
|
345 | def start_engines(self): | |
351 | # Some EngineSetLaunchers ignore `n` and use their own engine count, such as SSH: |
|
346 | # Some EngineSetLaunchers ignore `n` and use their own engine count, such as SSH: | |
352 | n = getattr(self.engine_launcher, 'engine_count', self.n) |
|
347 | n = getattr(self.engine_launcher, 'engine_count', self.n) | |
353 | self.log.info("Starting %s Engines with %s", n, self.engine_launcher_class) |
|
348 | self.log.info("Starting %s Engines with %s", n, self.engine_launcher_class) | |
354 | self.engine_launcher.start(self.n) |
|
349 | self.engine_launcher.start(self.n) | |
355 | self.engine_launcher.on_stop(self.engines_stopped_early) |
|
350 | self.engine_launcher.on_stop(self.engines_stopped_early) | |
356 | if self.early_shutdown: |
|
351 | if self.early_shutdown: | |
357 | ioloop.DelayedCallback(self.engines_started_ok, self.early_shutdown*1000, self.loop).start() |
|
352 | ioloop.DelayedCallback(self.engines_started_ok, self.early_shutdown*1000, self.loop).start() | |
358 |
|
353 | |||
359 | def engines_stopped_early(self, r): |
|
354 | def engines_stopped_early(self, r): | |
360 | if self.early_shutdown and not self._stopping: |
|
355 | if self.early_shutdown and not self._stopping: | |
361 | self.log.error(""" |
|
356 | self.log.error(""" | |
362 | Engines shut down early; they probably failed to connect. |
|
357 | Engines shut down early; they probably failed to connect. | |
363 |
|
358 | |||
364 | Check the engine log files for output. |
|
359 | Check the engine log files for output. | |
365 |
|
360 | |||
366 | If your controller and engines are not on the same machine, you probably |
|
361 | If your controller and engines are not on the same machine, you probably | |
367 | have to instruct the controller to listen on an interface other than localhost. |
|
362 | have to instruct the controller to listen on an interface other than localhost. | |
368 |
|
363 | |||
369 | You can set this by adding "--ip='*'" to your ControllerLauncher.controller_args. |
|
364 | You can set this by adding "--ip='*'" to your ControllerLauncher.controller_args. | |
370 |
|
365 | |||
371 | Be sure to read our security docs before instructing your controller to listen on |
|
366 | Be sure to read our security docs before instructing your controller to listen on | |
372 | a public interface. |
|
367 | a public interface. | |
373 | """) |
|
368 | """) | |
374 | self.stop_launchers() |
|
369 | self.stop_launchers() | |
375 |
|
370 | |||
376 | return self.engines_stopped(r) |
|
371 | return self.engines_stopped(r) | |
377 |
|
372 | |||
378 | def engines_stopped(self, r): |
|
373 | def engines_stopped(self, r): | |
379 | return self.loop.stop() |
|
374 | return self.loop.stop() | |
380 |
|
375 | |||
381 | def stop_engines(self): |
|
376 | def stop_engines(self): | |
382 | if self.engine_launcher.running: |
|
377 | if self.engine_launcher.running: | |
383 | self.log.info("Stopping Engines...") |
|
378 | self.log.info("Stopping Engines...") | |
384 | d = self.engine_launcher.stop() |
|
379 | d = self.engine_launcher.stop() | |
385 | return d |
|
380 | return d | |
386 | else: |
|
381 | else: | |
387 | return None |
|
382 | return None | |
388 |
|
383 | |||
389 | def stop_launchers(self, r=None): |
|
384 | def stop_launchers(self, r=None): | |
390 | if not self._stopping: |
|
385 | if not self._stopping: | |
391 | self._stopping = True |
|
386 | self._stopping = True | |
392 | self.log.error("IPython cluster: stopping") |
|
387 | self.log.error("IPython cluster: stopping") | |
393 | self.stop_engines() |
|
388 | self.stop_engines() | |
394 | # Wait a few seconds to let things shut down. |
|
389 | # Wait a few seconds to let things shut down. | |
395 | dc = ioloop.DelayedCallback(self.loop.stop, 3000, self.loop) |
|
390 | dc = ioloop.DelayedCallback(self.loop.stop, 3000, self.loop) | |
396 | dc.start() |
|
391 | dc.start() | |
397 |
|
392 | |||
398 | def sigint_handler(self, signum, frame): |
|
393 | def sigint_handler(self, signum, frame): | |
399 | self.log.debug("SIGINT received, stopping launchers...") |
|
394 | self.log.debug("SIGINT received, stopping launchers...") | |
400 | self.stop_launchers() |
|
395 | self.stop_launchers() | |
401 |
|
396 | |||
402 | def start_logging(self): |
|
397 | def start_logging(self): | |
403 | # Remove old log files of the controller and engine |
|
398 | # Remove old log files of the controller and engine | |
404 | if self.clean_logs: |
|
399 | if self.clean_logs: | |
405 | log_dir = self.profile_dir.log_dir |
|
400 | log_dir = self.profile_dir.log_dir | |
406 | for f in os.listdir(log_dir): |
|
401 | for f in os.listdir(log_dir): | |
407 | if re.match(r'ip(engine|controller)z-\d+\.(log|err|out)',f): |
|
402 | if re.match(r'ip(engine|controller)z-\d+\.(log|err|out)',f): | |
408 | os.remove(os.path.join(log_dir, f)) |
|
403 | os.remove(os.path.join(log_dir, f)) | |
409 | # This will remove old log files for ipcluster itself |
|
404 | # This will remove old log files for ipcluster itself | |
410 | # super(IPBaseParallelApplication, self).start_logging() |
|
405 | # super(IPBaseParallelApplication, self).start_logging() | |
411 |
|
406 | |||
412 | def start(self): |
|
407 | def start(self): | |
413 | """Start the app for the engines subcommand.""" |
|
408 | """Start the app for the engines subcommand.""" | |
414 | self.log.info("IPython cluster: started") |
|
409 | self.log.info("IPython cluster: started") | |
415 | # First see if the cluster is already running |
|
410 | # First see if the cluster is already running | |
416 |
|
411 | |||
417 | # Now log and daemonize |
|
412 | # Now log and daemonize | |
418 | self.log.info( |
|
413 | self.log.info( | |
419 | 'Starting engines with [daemon=%r]' % self.daemonize |
|
414 | 'Starting engines with [daemon=%r]' % self.daemonize | |
420 | ) |
|
415 | ) | |
421 | # TODO: Get daemonize working on Windows or as a Windows Server. |
|
416 | # TODO: Get daemonize working on Windows or as a Windows Server. | |
422 | if self.daemonize: |
|
417 | if self.daemonize: | |
423 | if os.name=='posix': |
|
418 | if os.name=='posix': | |
424 | daemonize() |
|
419 | daemonize() | |
425 |
|
420 | |||
426 | dc = ioloop.DelayedCallback(self.start_engines, 0, self.loop) |
|
421 | dc = ioloop.DelayedCallback(self.start_engines, 0, self.loop) | |
427 | dc.start() |
|
422 | dc.start() | |
428 | # Now write the new pid file AFTER our new forked pid is active. |
|
423 | # Now write the new pid file AFTER our new forked pid is active. | |
429 | # self.write_pid_file() |
|
424 | # self.write_pid_file() | |
430 | try: |
|
425 | try: | |
431 | self.loop.start() |
|
426 | self.loop.start() | |
432 | except KeyboardInterrupt: |
|
427 | except KeyboardInterrupt: | |
433 | pass |
|
428 | pass | |
434 | except zmq.ZMQError as e: |
|
429 | except zmq.ZMQError as e: | |
435 | if e.errno == errno.EINTR: |
|
430 | if e.errno == errno.EINTR: | |
436 | pass |
|
431 | pass | |
437 | else: |
|
432 | else: | |
438 | raise |
|
433 | raise | |
439 |
|
434 | |||
440 | start_aliases = {} |
|
435 | start_aliases = {} | |
441 | start_aliases.update(engine_aliases) |
|
436 | start_aliases.update(engine_aliases) | |
442 | start_aliases.update(dict( |
|
437 | start_aliases.update(dict( | |
443 | delay='IPClusterStart.delay', |
|
438 | delay='IPClusterStart.delay', | |
444 | controller = 'IPClusterStart.controller_launcher_class', |
|
439 | controller = 'IPClusterStart.controller_launcher_class', | |
445 | )) |
|
440 | )) | |
446 | start_aliases['clean-logs'] = 'IPClusterStart.clean_logs' |
|
441 | start_aliases['clean-logs'] = 'IPClusterStart.clean_logs' | |
447 |
|
442 | |||
class IPClusterStart(IPClusterEngines):

    name = u'ipcluster'
    description = start_help
    examples = _start_examples
    default_log_level = logging.INFO
    auto_create = Bool(True, config=True,
        help="whether to create the profile_dir if it doesn't exist")
    classes = List()
    def _classes_default(self):
        from IPython.parallel.apps import launcher
        return [ProfileDir] + [IPClusterEngines] + launcher.all_launchers

    clean_logs = Bool(True, config=True,
        help="whether to clean up old logs before starting")

    delay = CFloat(1., config=True,
        help="delay (in s) between starting the controller and the engines")

    controller_launcher = Any(config=True, help="Deprecated, use controller_launcher_class")
    def _controller_launcher_changed(self, name, old, new):
        if isinstance(new, basestring):
            # old 0.11-style config
            self.log.warn("WARNING: %s.controller_launcher is deprecated as of 0.12,"
                    " use controller_launcher_class" % self.__class__.__name__)
            self.controller_launcher_class = new
    controller_launcher_class = DottedObjectName('LocalControllerLauncher',
        config=True,
        help="""The class for launching a Controller. Change this value if you want
        your controller to also be launched by a batch system, such as PBS, SGE, MPI, etc.

        Each launcher class has its own set of configuration options, for making sure
        it will work in your environment.

        Note that using a batch launcher for the controller *does not* put it
        in the same batch job as the engines, so they will still start separately.

        IPython's bundled examples include:

            Local      : start the controller locally as a subprocess
            MPI        : use mpiexec to launch the controller in an MPI universe
            PBS        : use PBS (qsub) to submit the controller to a batch queue
            SGE        : use SGE (qsub) to submit the controller to a batch queue
            LSF        : use LSF (bsub) to submit the controller to a batch queue
            HTCondor   : use HTCondor to submit the controller to a batch queue
            SSH        : use SSH to start the controller
            WindowsHPC : use Windows HPC

        If you are using one of IPython's builtin launchers, you can specify just the
        prefix, e.g:

            c.IPClusterStart.controller_launcher_class = 'SSH'

        or:

            ipcluster start --controller=MPI

        """
        )
    reset = Bool(False, config=True,
        help="Whether to reset config files as part of '--create'."
        )

    # flags = Dict(flags)
    aliases = Dict(start_aliases)

    def init_launchers(self):
        self.controller_launcher = self.build_launcher(self.controller_launcher_class, 'Controller')
        self.engine_launcher = self.build_launcher(self.engine_launcher_class, 'EngineSet')

    def engines_stopped(self, r):
        """prevent parent.engines_stopped from stopping everything on engine shutdown"""
        pass

    def start_controller(self):
        self.log.info("Starting Controller with %s", self.controller_launcher_class)
        self.controller_launcher.on_stop(self.stop_launchers)
        self.controller_launcher.start()

    def stop_controller(self):
        # self.log.info("In stop_controller")
        if self.controller_launcher and self.controller_launcher.running:
            return self.controller_launcher.stop()

    def stop_launchers(self, r=None):
        if not self._stopping:
            self.stop_controller()
            super(IPClusterStart, self).stop_launchers()

    def start(self):
        """Start the app for the start subcommand."""
        # First see if the cluster is already running
        try:
            pid = self.get_pid_from_file()
        except PIDFileError:
            pass
        else:
            if self.check_pid(pid):
                self.log.critical(
                    'Cluster is already running with [pid=%s]. '
                    'use "ipcluster stop" to stop the cluster.' % pid
                )
                # Here I exit with an unusual exit status that other processes
                # can watch for to learn how I exited.
                self.exit(ALREADY_STARTED)
            else:
                self.remove_pid_file()


        # Now log and daemonize
        self.log.info(
            'Starting ipcluster with [daemon=%r]' % self.daemonize
        )
        # TODO: Get daemonize working on Windows or as a Windows Server.
        if self.daemonize:
            if os.name == 'posix':
                daemonize()

        dc = ioloop.DelayedCallback(self.start_controller, 0, self.loop)
        dc.start()
        dc = ioloop.DelayedCallback(self.start_engines, 1000*self.delay, self.loop)
        dc.start()
        # Now write the new pid file AFTER our new forked pid is active.
        self.write_pid_file()
        try:
            self.loop.start()
        except KeyboardInterrupt:
            pass
        except zmq.ZMQError as e:
            if e.errno == errno.EINTR:
                pass
            else:
                raise
        finally:
            self.remove_pid_file()

base = 'IPython.parallel.apps.ipclusterapp.IPCluster'

class IPClusterApp(BaseIPythonApplication):
    name = u'ipcluster'
    description = _description
    examples = _main_examples

    subcommands = {
        'start' : (base+'Start', start_help),
        'stop' : (base+'Stop', stop_help),
        'engines' : (base+'Engines', engines_help),
    }

    # no aliases or flags for parent App
    aliases = Dict()
    flags = Dict()

    def start(self):
        if self.subapp is None:
            print "No subcommand specified. Must specify one of: %s" % (self.subcommands.keys())

            self.print_description()
            self.print_subcommands()
            self.exit(1)
        else:
            return self.subapp.start()

launch_new_instance = IPClusterApp.launch_instance

if __name__ == '__main__':
    launch_new_instance()

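# A minimal usage sketch (not part of the module above): the same subcommand apps can
# be driven programmatically through the traitlets Application API; the flag value is
# illustrative only.
#
#     from IPython.parallel.apps.ipclusterapp import IPClusterStart
#     app = IPClusterStart.instance()
#     app.initialize(['--n=4'])        # roughly `ipcluster start --n=4`
#     app.start()
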
@@ -1,551 +1,546 @@
#!/usr/bin/env python
# encoding: utf-8
"""
The IPython controller application.

Authors:

* Brian Granger
* MinRK

"""

#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

from __future__ import with_statement

import json
import os
import stat
import sys

from multiprocessing import Process
from signal import signal, SIGINT, SIGABRT, SIGTERM

import zmq
from zmq.devices import ProcessMonitoredQueue
from zmq.log.handlers import PUBHandler

from IPython.core.profiledir import ProfileDir

from IPython.parallel.apps.baseapp import (
    BaseParallelApplication,
    base_aliases,
    base_flags,
    catch_config_error,
)
from IPython.utils.importstring import import_item
from IPython.utils.localinterfaces import LOCALHOST, PUBLIC_IPS
from IPython.utils.traitlets import Instance, Unicode, Bool, List, Dict, TraitError

from IPython.kernel.zmq.session import (
    Session, session_aliases, session_flags, default_secure
)

from IPython.parallel.controller.heartmonitor import HeartMonitor
from IPython.parallel.controller.hub import HubFactory
from IPython.parallel.controller.scheduler import TaskScheduler, launch_scheduler
from IPython.parallel.controller.dictdb import DictDB

from IPython.parallel.util import split_url, disambiguate_url, set_hwm

# conditional import of SQLiteDB / MongoDB backend class
real_dbs = []

try:
    from IPython.parallel.controller.sqlitedb import SQLiteDB
except ImportError:
    pass
else:
    real_dbs.append(SQLiteDB)

try:
    from IPython.parallel.controller.mongodb import MongoDB
except ImportError:
    pass
else:
    real_dbs.append(MongoDB)


#-----------------------------------------------------------------------------
# Module level variables
#-----------------------------------------------------------------------------


_description = """Start the IPython controller for parallel computing.

The IPython controller provides a gateway between the IPython engines and
clients. The controller needs to be started before the engines and can be
configured using command line options or using a cluster directory. Cluster
directories contain config, log and security files and are usually located in
your ipython directory and named as "profile_name". See the `profile`
and `profile-dir` options for details.
"""

_examples = """
ipcontroller --ip=192.168.0.1 --port=1000  # listen on ip, port for engines
ipcontroller --scheme=pure  # use the pure zeromq scheduler
"""


#-----------------------------------------------------------------------------
# The main application
#-----------------------------------------------------------------------------
flags = {}
flags.update(base_flags)
flags.update({
    'usethreads' : ( {'IPControllerApp' : {'use_threads' : True}},
                    'Use threads instead of processes for the schedulers'),
    'sqlitedb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.sqlitedb.SQLiteDB'}},
                    'use the SQLiteDB backend'),
    'mongodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.mongodb.MongoDB'}},
                    'use the MongoDB backend'),
    'dictdb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.DictDB'}},
                    'use the in-memory DictDB backend'),
    'nodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.NoDB'}},
                    """use dummy DB backend, which doesn't store any information.

                    This is the default as of IPython 0.13.

                    To enable delayed or repeated retrieval of results from the Hub,
                    select one of the true db backends.
                    """),
    'reuse' : ({'IPControllerApp' : {'reuse_files' : True}},
                    'reuse existing json connection files'),
    'restore' : ({'IPControllerApp' : {'restore_engines' : True, 'reuse_files' : True}},
                    'Attempt to restore engines from a JSON file. '
                    'For use when resuming a crashed controller'),
})

flags.update(session_flags)

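# The flags above are shorthand for ordinary config settings, so the same choices can be
# made in a profile's config file (illustrative sketch; the trait names come straight
# from the flag definitions above):
#
#     c.HubFactory.db_class = 'IPython.parallel.controller.sqlitedb.SQLiteDB'   # --sqlitedb
#     c.IPControllerApp.reuse_files = True                                      # --reuse
#     c.IPControllerApp.restore_engines = True                                  # --restore
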
aliases = dict(
    ssh = 'IPControllerApp.ssh_server',
    enginessh = 'IPControllerApp.engine_ssh_server',
    location = 'IPControllerApp.location',

    url = 'HubFactory.url',
    ip = 'HubFactory.ip',
    transport = 'HubFactory.transport',
    port = 'HubFactory.regport',

    ping = 'HeartMonitor.period',

    scheme = 'TaskScheduler.scheme_name',
    hwm = 'TaskScheduler.hwm',
)
aliases.update(base_aliases)
aliases.update(session_aliases)

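# For example (values are illustrative), `ipcontroller --ip=192.168.0.1 --port=10101
# --ssh=user@gateway` maps, via the aliases above, onto HubFactory.ip,
# HubFactory.regport and IPControllerApp.ssh_server respectively.
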
class IPControllerApp(BaseParallelApplication):

    name = u'ipcontroller'
    description = _description
    examples = _examples
    classes = [ProfileDir, Session, HubFactory, TaskScheduler, HeartMonitor, DictDB] + real_dbs

    # change default to True
    auto_create = Bool(True, config=True,
        help="""Whether to create profile dir if it doesn't exist.""")

    reuse_files = Bool(False, config=True,
        help="""Whether to reuse existing json connection files.
        If False, connection files will be removed on a clean exit.
        """
    )
    restore_engines = Bool(False, config=True,
        help="""Reload engine state from JSON file
        """
    )
    ssh_server = Unicode(u'', config=True,
        help="""ssh url for clients to use when connecting to the Controller
        processes. It should be of the form: [user@]server[:port]. The
        Controller's listening addresses must be accessible from the ssh server""",
    )
    engine_ssh_server = Unicode(u'', config=True,
        help="""ssh url for engines to use when connecting to the Controller
        processes. It should be of the form: [user@]server[:port]. The
        Controller's listening addresses must be accessible from the ssh server""",
    )
    location = Unicode(u'', config=True,
        help="""The external IP or domain name of the Controller, used for disambiguating
        engine and client connections.""",
    )
    import_statements = List([], config=True,
        help="import statements to be run at startup. Necessary in some environments"
    )

    use_threads = Bool(False, config=True,
        help='Use threads instead of processes for the schedulers',
    )

    engine_json_file = Unicode('ipcontroller-engine.json', config=True,
        help="JSON filename where engine connection info will be stored.")
    client_json_file = Unicode('ipcontroller-client.json', config=True,
        help="JSON filename where client connection info will be stored.")

    def _cluster_id_changed(self, name, old, new):
        super(IPControllerApp, self)._cluster_id_changed(name, old, new)
        self.engine_json_file = "%s-engine.json" % self.name
        self.client_json_file = "%s-client.json" % self.name


    # internal
    children = List()
    mq_class = Unicode('zmq.devices.ProcessMonitoredQueue')

    def _use_threads_changed(self, name, old, new):
        self.mq_class = 'zmq.devices.%sMonitoredQueue' % ('Thread' if new else 'Process')

    write_connection_files = Bool(True,
        help="""Whether to write connection files to disk.
        True in all cases other than runs with `reuse_files=True` *after the first*
        """
    )

    aliases = Dict(aliases)
    flags = Dict(flags)

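    # Illustrative config-file equivalents for the connection-related traits above
    # (hostnames and addresses are examples only):
    #
    #     c.IPControllerApp.ssh_server = 'user@login.example.com'
    #     c.IPControllerApp.location = '10.0.0.1'
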
    def save_connection_dict(self, fname, cdict):
        """save a connection dict to json file."""
        c = self.config
        url = cdict['registration']
        location = cdict['location']

        if not location:
            if PUBLIC_IPS:
                location = PUBLIC_IPS[-1]
            else:
                self.log.warn("Could not identify this machine's IP, assuming %s."
                    " You may need to specify '--location=<external_ip_address>' to help"
                    " IPython decide when to connect via loopback." % LOCALHOST)
                location = LOCALHOST
            cdict['location'] = location
        fname = os.path.join(self.profile_dir.security_dir, fname)
        self.log.info("writing connection info to %s", fname)
        with open(fname, 'w') as f:
            f.write(json.dumps(cdict, indent=2))
        os.chmod(fname, stat.S_IRUSR | stat.S_IWUSR)

    def load_config_from_json(self):
        """load config from existing json connector files."""
        c = self.config
        self.log.debug("loading config from JSON")

        # load engine config

        fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
        self.log.info("loading connection info from %s", fname)
        with open(fname) as f:
            ecfg = json.loads(f.read())

        # json gives unicode, Session.key wants bytes
        c.Session.key = ecfg['exec_key'].encode('ascii')

        xport, ip = ecfg['interface'].split('://')

        c.HubFactory.engine_ip = ip
        c.HubFactory.engine_transport = xport

        self.location = ecfg['location']
        if not self.engine_ssh_server:
            self.engine_ssh_server = ecfg['ssh']

        # load client config

        fname = os.path.join(self.profile_dir.security_dir, self.client_json_file)
        self.log.info("loading connection info from %s", fname)
        with open(fname) as f:
            ccfg = json.loads(f.read())

        for key in ('exec_key', 'registration', 'pack', 'unpack'):
            assert ccfg[key] == ecfg[key], "mismatch between engine and client info: %r" % key

        xport, addr = ccfg['interface'].split('://')

        c.HubFactory.client_transport = xport
        c.HubFactory.client_ip = addr
        if not self.ssh_server:
            self.ssh_server = ccfg['ssh']

        # load port config:
        c.HubFactory.regport = ecfg['registration']
        c.HubFactory.hb = (ecfg['hb_ping'], ecfg['hb_pong'])
        c.HubFactory.control = (ccfg['control'], ecfg['control'])
        c.HubFactory.mux = (ccfg['mux'], ecfg['mux'])
        c.HubFactory.task = (ccfg['task'], ecfg['task'])
        c.HubFactory.iopub = (ccfg['iopub'], ecfg['iopub'])
        c.HubFactory.notifier_port = ccfg['notification']

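    # For reference, the connection files read above are small JSON documents; the keys
    # accessed here imply a shape roughly like the following (values are illustrative,
    # and the engine and client files carry slightly different subsets of keys):
    #
    #     {
    #       "exec_key": "...", "interface": "tcp://127.0.0.1", "location": "10.0.0.1",
    #       "ssh": "", "registration": 10101, "control": 10102, "mux": 10103,
    #       "task": 10104, "iopub": 10105, "hb_ping": 10106, "hb_pong": 10107,
    #       "notification": 10108, "pack": "json", "unpack": "json"
    #     }
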
    def cleanup_connection_files(self):
        if self.reuse_files:
            self.log.debug("leaving JSON connection files for reuse")
            return
        self.log.debug("cleaning up JSON connection files")
        for f in (self.client_json_file, self.engine_json_file):
            f = os.path.join(self.profile_dir.security_dir, f)
            try:
                os.remove(f)
            except Exception as e:
                self.log.error("Failed to cleanup connection file: %s", e)
            else:
                self.log.debug(u"removed %s", f)

    def load_secondary_config(self):
        """secondary config, loading from JSON and setting defaults"""
        if self.reuse_files:
            try:
                self.load_config_from_json()
            except (AssertionError, IOError) as e:
                self.log.error("Could not load config from JSON: %s" % e)
            else:
                # successfully loaded config from JSON, and reuse=True
                # no need to write back the same file
                self.write_connection_files = False

        # switch Session.key default to secure
        default_secure(self.config)
        self.log.debug("Config changed")
        self.log.debug(repr(self.config))

    def init_hub(self):
        c = self.config

        self.do_import_statements()

        try:
            self.factory = HubFactory(config=c, log=self.log)
            # self.start_logging()
            self.factory.init_hub()
        except TraitError:
            raise
        except Exception:
            self.log.error("Couldn't construct the Controller", exc_info=True)
            self.exit(1)

        if self.write_connection_files:
            # save to new json config files
            f = self.factory
            base = {
                'exec_key' : f.session.key.decode('ascii'),
                'location' : self.location,
                'pack' : f.session.packer,
                'unpack' : f.session.unpacker,
            }

            cdict = {'ssh' : self.ssh_server}
            cdict.update(f.client_info)
            cdict.update(base)
            self.save_connection_dict(self.client_json_file, cdict)

            edict = {'ssh' : self.engine_ssh_server}
            edict.update(f.engine_info)
            edict.update(base)
            self.save_connection_dict(self.engine_json_file, edict)

        fname = "engines%s.json" % self.cluster_id
        self.factory.hub.engine_state_file = os.path.join(self.profile_dir.log_dir, fname)
        if self.restore_engines:
            self.factory.hub._load_engine_state()

    def init_schedulers(self):
        children = self.children
        mq = import_item(str(self.mq_class))

        f = self.factory
        ident = f.session.bsession
        # disambiguate url, in case of *
        monitor_url = disambiguate_url(f.monitor_url)
        # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
        # IOPub relay (in a Process)
        q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A', b'iopub')
        q.bind_in(f.client_url('iopub'))
        q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
        q.bind_out(f.engine_url('iopub'))
        q.setsockopt_out(zmq.SUBSCRIBE, b'')
        q.connect_mon(monitor_url)
        q.daemon = True
        children.append(q)

        # Multiplexer Queue (in a Process)
        q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')

        q.bind_in(f.client_url('mux'))
        q.setsockopt_in(zmq.IDENTITY, b'mux_in')
        q.bind_out(f.engine_url('mux'))
        q.setsockopt_out(zmq.IDENTITY, b'mux_out')
        q.connect_mon(monitor_url)
        q.daemon = True
        children.append(q)

        # Control Queue (in a Process)
        q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
        q.bind_in(f.client_url('control'))
        q.setsockopt_in(zmq.IDENTITY, b'control_in')
        q.bind_out(f.engine_url('control'))
        q.setsockopt_out(zmq.IDENTITY, b'control_out')
        q.connect_mon(monitor_url)
        q.daemon = True
        children.append(q)
        try:
            scheme = self.config.TaskScheduler.scheme_name
        except AttributeError:
            scheme = TaskScheduler.scheme_name.get_default_value()
        # Task Queue (in a Process)
        if scheme == 'pure':
            self.log.warn("task::using pure DEALER Task scheduler")
            q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
            # q.setsockopt_out(zmq.HWM, hub.hwm)
            q.bind_in(f.client_url('task'))
            q.setsockopt_in(zmq.IDENTITY, b'task_in')
            q.bind_out(f.engine_url('task'))
            q.setsockopt_out(zmq.IDENTITY, b'task_out')
            q.connect_mon(monitor_url)
            q.daemon = True
            children.append(q)
        elif scheme == 'none':
            self.log.warn("task::using no Task scheduler")

        else:
            self.log.info("task::using Python %s Task scheduler" % scheme)
            sargs = (f.client_url('task'), f.engine_url('task'),
                    monitor_url, disambiguate_url(f.client_url('notification')),
                    disambiguate_url(f.client_url('registration')),
                    )
            kwargs = dict(logname='scheduler', loglevel=self.log_level,
                            log_url = self.log_url, config=dict(self.config))
            if 'Process' in self.mq_class:
                # run the Python scheduler in a Process
                q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
                q.daemon = True
                children.append(q)
            else:
                # single-threaded Controller
                kwargs['in_thread'] = True
                launch_scheduler(*sargs, **kwargs)

        # set unlimited HWM for all relay devices
        if hasattr(zmq, 'SNDHWM'):
            q = children[0]
            q.setsockopt_in(zmq.RCVHWM, 0)
            q.setsockopt_out(zmq.SNDHWM, 0)

            for q in children[1:]:
                if not hasattr(q, 'setsockopt_in'):
                    continue
                q.setsockopt_in(zmq.SNDHWM, 0)
                q.setsockopt_in(zmq.RCVHWM, 0)
                q.setsockopt_out(zmq.SNDHWM, 0)
                q.setsockopt_out(zmq.RCVHWM, 0)
                q.setsockopt_mon(zmq.SNDHWM, 0)

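    # Note: the task scheduler flavour chosen above is an ordinary config option, so it
    # can also be set without the command line (illustrative; 'pure' and 'none' are the
    # special cases handled explicitly in init_schedulers):
    #
    #     c.TaskScheduler.scheme_name = 'pure'    # same effect as `ipcontroller --scheme=pure`
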
    def terminate_children(self):
        child_procs = []
        for child in self.children:
            if isinstance(child, ProcessMonitoredQueue):
                child_procs.append(child.launcher)
            elif isinstance(child, Process):
                child_procs.append(child)
        if child_procs:
            self.log.critical("terminating children...")
            for child in child_procs:
                try:
                    child.terminate()
                except OSError:
                    # already dead
                    pass

    def handle_signal(self, sig, frame):
        self.log.critical("Received signal %i, shutting down", sig)
        self.terminate_children()
        self.loop.stop()

    def init_signal(self):
        for sig in (SIGINT, SIGABRT, SIGTERM):
            signal(sig, self.handle_signal)

    def do_import_statements(self):
        statements = self.import_statements
        for s in statements:
            try:
                self.log.info("Executing statement: '%s'" % s)
                exec s in globals(), locals()
            except:
                self.log.error("Error running statement: %s" % s)

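    # The statements executed above come straight from the `import_statements` trait,
    # e.g. (illustrative):
    #
    #     c.IPControllerApp.import_statements = ['import numpy']
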
    def forward_logging(self):
        if self.log_url:
            self.log.info("Forwarding logging to %s" % self.log_url)
            context = zmq.Context.instance()
            lsock = context.socket(zmq.PUB)
            lsock.connect(self.log_url)
            handler = PUBHandler(lsock)
            handler.root_topic = 'controller'
            handler.setLevel(self.log_level)
            self.log.addHandler(handler)

    @catch_config_error
    def initialize(self, argv=None):
        super(IPControllerApp, self).initialize(argv)
        self.forward_logging()
        self.load_secondary_config()
        self.init_hub()
        self.init_schedulers()

    def start(self):
        # Start the subprocesses:
        self.factory.start()
        # children must be started before signals are set up,
        # otherwise signal-handling will fire multiple times
        for child in self.children:
            child.start()
        self.init_signal()

        self.write_pid_file(overwrite=True)

        try:
            self.factory.loop.start()
        except KeyboardInterrupt:
            self.log.critical("Interrupted, Exiting...\n")
        finally:
            self.cleanup_connection_files()


def launch_new_instance(*args, **kwargs):
    """Create and run the IPython controller"""
    if sys.platform == 'win32':
        # make sure we don't get called from a multiprocessing subprocess
        # this can result in infinite Controllers being started on Windows
        # which doesn't have a proper fork, so multiprocessing is wonky

        # this only comes up when IPython has been installed using vanilla
        # setuptools, and *not* distribute.
        import multiprocessing
        p = multiprocessing.current_process()
        # the main process has name 'MainProcess'
        # subprocesses will have names like 'Process-1'
        if p.name != 'MainProcess':
            # we are a subprocess, don't start another Controller!
            return
    return IPControllerApp.launch_instance(*args, **kwargs)


if __name__ == '__main__':
    launch_new_instance()
@@ -1,396 +1,392 @@
#!/usr/bin/env python
# encoding: utf-8
"""
The IPython engine application

Authors:

* Brian Granger
* MinRK

"""

#-----------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------

import json
import os
import sys
import time

import zmq
from zmq.eventloop import ioloop

from IPython.core.profiledir import ProfileDir
from IPython.parallel.apps.baseapp import (
    BaseParallelApplication,
    base_aliases,
    base_flags,
    catch_config_error,
)
from IPython.kernel.zmq.log import EnginePUBHandler
from IPython.kernel.zmq.ipkernel import Kernel
from IPython.kernel.zmq.kernelapp import IPKernelApp
from IPython.kernel.zmq.session import (
    Session, session_aliases, session_flags
)
from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell

from IPython.config.configurable import Configurable

from IPython.parallel.engine.engine import EngineFactory
from IPython.parallel.util import disambiguate_ip_address

from IPython.utils.importstring import import_item
from IPython.utils.py3compat import cast_bytes
from IPython.utils.traitlets import Bool, Unicode, Dict, List, Float, Instance


#-----------------------------------------------------------------------------
# Module level variables
#-----------------------------------------------------------------------------

_description = """Start an IPython engine for parallel computing.

IPython engines run in parallel and perform computations on behalf of a client
and controller. A controller needs to be started before the engines. The
engine can be configured using command line options or using a cluster
69 | directory. Cluster directories contain config, log and security files and are |
|
66 | directory. Cluster directories contain config, log and security files and are | |
70 | usually located in your ipython directory and named as "profile_name". |
|
67 | usually located in your ipython directory and named as "profile_name". | |
71 | See the `profile` and `profile-dir` options for details. |
|
68 | See the `profile` and `profile-dir` options for details. | |
72 | """ |
|
69 | """ | |
73 |
|
70 | |||
74 | _examples = """ |
|
71 | _examples = """ | |
75 | ipengine --ip=192.168.0.1 --port=1000 # connect to hub at ip and port |
|
72 | ipengine --ip=192.168.0.1 --port=1000 # connect to hub at ip and port | |
76 | ipengine --log-to-file --log-level=DEBUG # log to a file with DEBUG verbosity |
|
73 | ipengine --log-to-file --log-level=DEBUG # log to a file with DEBUG verbosity | |
77 | """ |
|
74 | """ | |
78 |
|
75 | |||
79 | #----------------------------------------------------------------------------- |
|
76 | #----------------------------------------------------------------------------- | |
80 | # MPI configuration |
|
77 | # MPI configuration | |
81 | #----------------------------------------------------------------------------- |
|
78 | #----------------------------------------------------------------------------- | |
82 |
|
79 | |||
83 | mpi4py_init = """from mpi4py import MPI as mpi |
|
80 | mpi4py_init = """from mpi4py import MPI as mpi | |
84 | mpi.size = mpi.COMM_WORLD.Get_size() |
|
81 | mpi.size = mpi.COMM_WORLD.Get_size() | |
85 | mpi.rank = mpi.COMM_WORLD.Get_rank() |
|
82 | mpi.rank = mpi.COMM_WORLD.Get_rank() | |
86 | """ |
|
83 | """ | |
87 |
|
84 | |||
88 |
|
85 | |||
89 | pytrilinos_init = """from PyTrilinos import Epetra |
|
86 | pytrilinos_init = """from PyTrilinos import Epetra | |
90 | class SimpleStruct: |
|
87 | class SimpleStruct: | |
91 | pass |
|
88 | pass | |
92 | mpi = SimpleStruct() |
|
89 | mpi = SimpleStruct() | |
93 | mpi.rank = 0 |
|
90 | mpi.rank = 0 | |
94 | mpi.size = 0 |
|
91 | mpi.size = 0 | |
95 | """ |
|
92 | """ | |
96 |
|
93 | |||
97 | class MPI(Configurable): |
|
94 | class MPI(Configurable): | |
98 | """Configurable for MPI initialization""" |
|
95 | """Configurable for MPI initialization""" | |
99 | use = Unicode('', config=True, |
|
96 | use = Unicode('', config=True, | |
100 | help='How to enable MPI (mpi4py, pytrilinos, or empty string to disable).' |
|
97 | help='How to enable MPI (mpi4py, pytrilinos, or empty string to disable).' | |
101 | ) |
|
98 | ) | |
102 |
|
99 | |||
103 | def _use_changed(self, name, old, new): |
|
100 | def _use_changed(self, name, old, new): | |
104 | # load default init script if it's not set |
|
101 | # load default init script if it's not set | |
105 | if not self.init_script: |
|
102 | if not self.init_script: | |
106 | self.init_script = self.default_inits.get(new, '') |
|
103 | self.init_script = self.default_inits.get(new, '') | |
107 |
|
104 | |||
108 | init_script = Unicode('', config=True, |
|
105 | init_script = Unicode('', config=True, | |
109 | help="Initialization code for MPI") |
|
106 | help="Initialization code for MPI") | |
110 |
|
107 | |||
111 | default_inits = Dict({'mpi4py' : mpi4py_init, 'pytrilinos':pytrilinos_init}, |
|
108 | default_inits = Dict({'mpi4py' : mpi4py_init, 'pytrilinos':pytrilinos_init}, | |
112 | config=True) |
|
109 | config=True) | |
113 |
|
110 | |||
114 |
|
111 | |||
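
The MPI configurable above resolves its initialization code by a simple rule: an explicit init_script always wins, otherwise the snippet registered for the requested backend in default_inits is used, otherwise nothing. A self-contained sketch of that lookup (the names mirror the class above, but this is illustrative only, not the shipped implementation):

default_inits = {
    'mpi4py': "from mpi4py import MPI as mpi",
    'pytrilinos': "from PyTrilinos import Epetra",
}

def resolve_init_script(use, init_script=''):
    # an explicit script always wins; otherwise fall back to the snippet
    # registered for the requested MPI flavour, or to the empty string
    return init_script or default_inits.get(use, '')

print(resolve_init_script('mpi4py'))          # the mpi4py default snippet
print(resolve_init_script('mpi4py', 'pass'))  # an explicit script wins
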
115 | #----------------------------------------------------------------------------- |
|
112 | #----------------------------------------------------------------------------- | |
116 | # Main application |
|
113 | # Main application | |
117 | #----------------------------------------------------------------------------- |
|
114 | #----------------------------------------------------------------------------- | |
118 | aliases = dict( |
|
115 | aliases = dict( | |
119 | file = 'IPEngineApp.url_file', |
|
116 | file = 'IPEngineApp.url_file', | |
120 | c = 'IPEngineApp.startup_command', |
|
117 | c = 'IPEngineApp.startup_command', | |
121 | s = 'IPEngineApp.startup_script', |
|
118 | s = 'IPEngineApp.startup_script', | |
122 |
|
119 | |||
123 | url = 'EngineFactory.url', |
|
120 | url = 'EngineFactory.url', | |
124 | ssh = 'EngineFactory.sshserver', |
|
121 | ssh = 'EngineFactory.sshserver', | |
125 | sshkey = 'EngineFactory.sshkey', |
|
122 | sshkey = 'EngineFactory.sshkey', | |
126 | ip = 'EngineFactory.ip', |
|
123 | ip = 'EngineFactory.ip', | |
127 | transport = 'EngineFactory.transport', |
|
124 | transport = 'EngineFactory.transport', | |
128 | port = 'EngineFactory.regport', |
|
125 | port = 'EngineFactory.regport', | |
129 | location = 'EngineFactory.location', |
|
126 | location = 'EngineFactory.location', | |
130 |
|
127 | |||
131 | timeout = 'EngineFactory.timeout', |
|
128 | timeout = 'EngineFactory.timeout', | |
132 |
|
129 | |||
133 | mpi = 'MPI.use', |
|
130 | mpi = 'MPI.use', | |
134 |
|
131 | |||
135 | ) |
|
132 | ) | |
136 | aliases.update(base_aliases) |
|
133 | aliases.update(base_aliases) | |
137 | aliases.update(session_aliases) |
|
134 | aliases.update(session_aliases) | |
138 | flags = {} |
|
135 | flags = {} | |
139 | flags.update(base_flags) |
|
136 | flags.update(base_flags) | |
140 | flags.update(session_flags) |
|
137 | flags.update(session_flags) | |
141 |
|
138 | |||
142 | class IPEngineApp(BaseParallelApplication): |
|
139 | class IPEngineApp(BaseParallelApplication): | |
143 |
|
140 | |||
144 | name = 'ipengine' |
|
141 | name = 'ipengine' | |
145 | description = _description |
|
142 | description = _description | |
146 | examples = _examples |
|
143 | examples = _examples | |
147 | config_file_name = Unicode(default_config_file_name) |
|
|||
148 | classes = List([ZMQInteractiveShell, ProfileDir, Session, EngineFactory, Kernel, MPI]) |
|
144 | classes = List([ZMQInteractiveShell, ProfileDir, Session, EngineFactory, Kernel, MPI]) | |
149 |
|
145 | |||
150 | startup_script = Unicode(u'', config=True, |
|
146 | startup_script = Unicode(u'', config=True, | |
151 | help='specify a script to be run at startup') |
|
147 | help='specify a script to be run at startup') | |
152 | startup_command = Unicode('', config=True, |
|
148 | startup_command = Unicode('', config=True, | |
153 | help='specify a command to be run at startup') |
|
149 | help='specify a command to be run at startup') | |
154 |
|
150 | |||
155 | url_file = Unicode(u'', config=True, |
|
151 | url_file = Unicode(u'', config=True, | |
156 | help="""The full location of the file containing the connection information for |
|
152 | help="""The full location of the file containing the connection information for | |
157 | the controller. If this is not given, the file must be in the |
|
153 | the controller. If this is not given, the file must be in the | |
158 | security directory of the cluster directory. This location is |
|
154 | security directory of the cluster directory. This location is | |
159 | resolved using the `profile` or `profile_dir` options.""", |
|
155 | resolved using the `profile` or `profile_dir` options.""", | |
160 | ) |
|
156 | ) | |
161 | wait_for_url_file = Float(5, config=True, |
|
157 | wait_for_url_file = Float(5, config=True, | |
162 | help="""The maximum number of seconds to wait for url_file to exist. |
|
158 | help="""The maximum number of seconds to wait for url_file to exist. | |
163 | This is useful for batch-systems and shared-filesystems where the |
|
159 | This is useful for batch-systems and shared-filesystems where the | |
164 | controller and engine are started at the same time and it |
|
160 | controller and engine are started at the same time and it | |
165 | may take a moment for the controller to write the connector files.""") |
|
161 | may take a moment for the controller to write the connector files.""") | |
166 |
|
162 | |||
167 | url_file_name = Unicode(u'ipcontroller-engine.json', config=True) |
|
163 | url_file_name = Unicode(u'ipcontroller-engine.json', config=True) | |
168 |
|
164 | |||
169 | def _cluster_id_changed(self, name, old, new): |
|
165 | def _cluster_id_changed(self, name, old, new): | |
170 | if new: |
|
166 | if new: | |
171 | base = 'ipcontroller-%s' % new |
|
167 | base = 'ipcontroller-%s' % new | |
172 | else: |
|
168 | else: | |
173 | base = 'ipcontroller' |
|
169 | base = 'ipcontroller' | |
174 | self.url_file_name = "%s-engine.json" % base |
|
170 | self.url_file_name = "%s-engine.json" % base | |
175 |
|
171 | |||
176 | log_url = Unicode('', config=True, |
|
172 | log_url = Unicode('', config=True, | |
177 | help="""The URL for the iploggerapp instance, for forwarding |
|
173 | help="""The URL for the iploggerapp instance, for forwarding | |
178 | logging to a central location.""") |
|
174 | logging to a central location.""") | |
179 |
|
175 | |||
180 | # an IPKernelApp instance, used to setup listening for shell frontends |
|
176 | # an IPKernelApp instance, used to setup listening for shell frontends | |
181 | kernel_app = Instance(IPKernelApp) |
|
177 | kernel_app = Instance(IPKernelApp) | |
182 |
|
178 | |||
183 | aliases = Dict(aliases) |
|
179 | aliases = Dict(aliases) | |
184 | flags = Dict(flags) |
|
180 | flags = Dict(flags) | |
185 |
|
181 | |||
186 | @property |
|
182 | @property | |
187 | def kernel(self): |
|
183 | def kernel(self): | |
188 | """allow access to the Kernel object, so I look like IPKernelApp""" |
|
184 | """allow access to the Kernel object, so I look like IPKernelApp""" | |
189 | return self.engine.kernel |
|
185 | return self.engine.kernel | |
190 |
|
186 | |||
191 | def find_url_file(self): |
|
187 | def find_url_file(self): | |
192 | """Set the url file. |
|
188 | """Set the url file. | |
193 |
|
189 | |||
194 | Here we don't try to actually see if it exists or is valid, as that |
|
190 | Here we don't try to actually see if it exists or is valid, as that | |
195 | is handled by the connection logic. |
|
191 | is handled by the connection logic. | |
196 | """ |
|
192 | """ | |
197 | config = self.config |
|
193 | config = self.config | |
198 | # Find the actual controller key file |
|
194 | # Find the actual controller key file | |
199 | if not self.url_file: |
|
195 | if not self.url_file: | |
200 | self.url_file = os.path.join( |
|
196 | self.url_file = os.path.join( | |
201 | self.profile_dir.security_dir, |
|
197 | self.profile_dir.security_dir, | |
202 | self.url_file_name |
|
198 | self.url_file_name | |
203 | ) |
|
199 | ) | |
204 |
|
200 | |||
205 | def load_connector_file(self): |
|
201 | def load_connector_file(self): | |
206 | """load config from a JSON connector file, |
|
202 | """load config from a JSON connector file, | |
207 | at a *lower* priority than command-line/config files. |
|
203 | at a *lower* priority than command-line/config files. | |
208 | """ |
|
204 | """ | |
209 |
|
205 | |||
210 | self.log.info("Loading url_file %r", self.url_file) |
|
206 | self.log.info("Loading url_file %r", self.url_file) | |
211 | config = self.config |
|
207 | config = self.config | |
212 |
|
208 | |||
213 | with open(self.url_file) as f: |
|
209 | with open(self.url_file) as f: | |
214 | d = json.loads(f.read()) |
|
210 | d = json.loads(f.read()) | |
215 |
|
211 | |||
216 | # allow hand-override of location for disambiguation |
|
212 | # allow hand-override of location for disambiguation | |
217 | # and ssh-server |
|
213 | # and ssh-server | |
218 | try: |
|
214 | try: | |
219 | config.EngineFactory.location |
|
215 | config.EngineFactory.location | |
220 | except AttributeError: |
|
216 | except AttributeError: | |
221 | config.EngineFactory.location = d['location'] |
|
217 | config.EngineFactory.location = d['location'] | |
222 |
|
218 | |||
223 | try: |
|
219 | try: | |
224 | config.EngineFactory.sshserver |
|
220 | config.EngineFactory.sshserver | |
225 | except AttributeError: |
|
221 | except AttributeError: | |
226 | config.EngineFactory.sshserver = d.get('ssh') |
|
222 | config.EngineFactory.sshserver = d.get('ssh') | |
227 |
|
223 | |||
228 | location = config.EngineFactory.location |
|
224 | location = config.EngineFactory.location | |
229 |
|
225 | |||
230 | proto, ip = d['interface'].split('://') |
|
226 | proto, ip = d['interface'].split('://') | |
231 | ip = disambiguate_ip_address(ip, location) |
|
227 | ip = disambiguate_ip_address(ip, location) | |
232 | d['interface'] = '%s://%s' % (proto, ip) |
|
228 | d['interface'] = '%s://%s' % (proto, ip) | |
233 |
|
229 | |||
234 | # DO NOT allow override of basic URLs, serialization, or exec_key |
|
230 | # DO NOT allow override of basic URLs, serialization, or exec_key | |
235 | # JSON file takes top priority there |
|
231 | # JSON file takes top priority there | |
236 | config.Session.key = cast_bytes(d['exec_key']) |
|
232 | config.Session.key = cast_bytes(d['exec_key']) | |
237 |
|
233 | |||
238 | config.EngineFactory.url = d['interface'] + ':%i' % d['registration'] |
|
234 | config.EngineFactory.url = d['interface'] + ':%i' % d['registration'] | |
239 |
|
235 | |||
240 | config.Session.packer = d['pack'] |
|
236 | config.Session.packer = d['pack'] | |
241 | config.Session.unpacker = d['unpack'] |
|
237 | config.Session.unpacker = d['unpack'] | |
242 |
|
238 | |||
243 | self.log.debug("Config changed:") |
|
239 | self.log.debug("Config changed:") | |
244 | self.log.debug("%r", config) |
|
240 | self.log.debug("%r", config) | |
245 | self.connection_info = d |
|
241 | self.connection_info = d | |
246 |
|
242 | |||
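
load_connector_file() above only ever reads a handful of keys from the JSON file written by the controller. For orientation, a hypothetical connection dict with those keys — the values below are made-up placeholders, not output from any real cluster:

connection_info = {
    "location": "10.0.0.1",          # controller host, used to disambiguate the ip
    "ssh": "",                       # optional ssh server for tunnelling
    "interface": "tcp://127.0.0.1",  # transport://ip; the port comes from 'registration'
    "registration": 12345,           # appended to 'interface' to build EngineFactory.url
    "exec_key": "a-shared-secret",   # becomes Session.key (cast to bytes)
    "pack": "json",                  # Session.packer
    "unpack": "json",                # Session.unpacker
}
print(connection_info["interface"] + ':%i' % connection_info["registration"])
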
247 | def bind_kernel(self, **kwargs): |
|
243 | def bind_kernel(self, **kwargs): | |
248 | """Promote engine to listening kernel, accessible to frontends.""" |
|
244 | """Promote engine to listening kernel, accessible to frontends.""" | |
249 | if self.kernel_app is not None: |
|
245 | if self.kernel_app is not None: | |
250 | return |
|
246 | return | |
251 |
|
247 | |||
252 | self.log.info("Opening ports for direct connections as an IPython kernel") |
|
248 | self.log.info("Opening ports for direct connections as an IPython kernel") | |
253 |
|
249 | |||
254 | kernel = self.kernel |
|
250 | kernel = self.kernel | |
255 |
|
251 | |||
256 | kwargs.setdefault('config', self.config) |
|
252 | kwargs.setdefault('config', self.config) | |
257 | kwargs.setdefault('log', self.log) |
|
253 | kwargs.setdefault('log', self.log) | |
258 | kwargs.setdefault('profile_dir', self.profile_dir) |
|
254 | kwargs.setdefault('profile_dir', self.profile_dir) | |
259 | kwargs.setdefault('session', self.engine.session) |
|
255 | kwargs.setdefault('session', self.engine.session) | |
260 |
|
256 | |||
261 | app = self.kernel_app = IPKernelApp(**kwargs) |
|
257 | app = self.kernel_app = IPKernelApp(**kwargs) | |
262 |
|
258 | |||
263 | # allow IPKernelApp.instance(): |
|
259 | # allow IPKernelApp.instance(): | |
264 | IPKernelApp._instance = app |
|
260 | IPKernelApp._instance = app | |
265 |
|
261 | |||
266 | app.init_connection_file() |
|
262 | app.init_connection_file() | |
267 | # relevant contents of init_sockets: |
|
263 | # relevant contents of init_sockets: | |
268 |
|
264 | |||
269 | app.shell_port = app._bind_socket(kernel.shell_streams[0], app.shell_port) |
|
265 | app.shell_port = app._bind_socket(kernel.shell_streams[0], app.shell_port) | |
270 | app.log.debug("shell ROUTER Channel on port: %i", app.shell_port) |
|
266 | app.log.debug("shell ROUTER Channel on port: %i", app.shell_port) | |
271 |
|
267 | |||
272 | app.iopub_port = app._bind_socket(kernel.iopub_socket, app.iopub_port) |
|
268 | app.iopub_port = app._bind_socket(kernel.iopub_socket, app.iopub_port) | |
273 | app.log.debug("iopub PUB Channel on port: %i", app.iopub_port) |
|
269 | app.log.debug("iopub PUB Channel on port: %i", app.iopub_port) | |
274 |
|
270 | |||
275 | kernel.stdin_socket = self.engine.context.socket(zmq.ROUTER) |
|
271 | kernel.stdin_socket = self.engine.context.socket(zmq.ROUTER) | |
276 | app.stdin_port = app._bind_socket(kernel.stdin_socket, app.stdin_port) |
|
272 | app.stdin_port = app._bind_socket(kernel.stdin_socket, app.stdin_port) | |
277 | app.log.debug("stdin ROUTER Channel on port: %i", app.stdin_port) |
|
273 | app.log.debug("stdin ROUTER Channel on port: %i", app.stdin_port) | |
278 |
|
274 | |||
279 | # start the heartbeat, and log connection info: |
|
275 | # start the heartbeat, and log connection info: | |
280 |
|
276 | |||
281 | app.init_heartbeat() |
|
277 | app.init_heartbeat() | |
282 |
|
278 | |||
283 | app.log_connection_info() |
|
279 | app.log_connection_info() | |
284 | app.write_connection_file() |
|
280 | app.write_connection_file() | |
285 |
|
281 | |||
286 |
|
282 | |||
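
bind_kernel() above reuses IPKernelApp's port-binding logic; the underlying zmq operation is just binding a socket to a fixed or OS-assigned port and recording the result. A minimal sketch with plain pyzmq (addresses are placeholders):

import zmq

ctx = zmq.Context.instance()
shell = ctx.socket(zmq.ROUTER)
# bind to an OS-assigned port, as _bind_socket does when no port is configured
port = shell.bind_to_random_port('tcp://127.0.0.1')
print("shell ROUTER Channel on port: %i" % port)
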
287 | def init_engine(self): |
|
283 | def init_engine(self): | |
288 | # This is the working dir by now. |
|
284 | # This is the working dir by now. | |
289 | sys.path.insert(0, '') |
|
285 | sys.path.insert(0, '') | |
290 | config = self.config |
|
286 | config = self.config | |
291 | # print config |
|
287 | # print config | |
292 | self.find_url_file() |
|
288 | self.find_url_file() | |
293 |
|
289 | |||
294 | # was the url manually specified? |
|
290 | # was the url manually specified? | |
295 | keys = set(self.config.EngineFactory.keys()) |
|
291 | keys = set(self.config.EngineFactory.keys()) | |
296 | keys = keys.union(set(self.config.RegistrationFactory.keys())) |
|
292 | keys = keys.union(set(self.config.RegistrationFactory.keys())) | |
297 |
|
293 | |||
298 | if keys.intersection(set(['ip', 'url', 'port'])): |
|
294 | if keys.intersection(set(['ip', 'url', 'port'])): | |
299 | # Connection info was specified, don't wait for the file |
|
295 | # Connection info was specified, don't wait for the file | |
300 | url_specified = True |
|
296 | url_specified = True | |
301 | self.wait_for_url_file = 0 |
|
297 | self.wait_for_url_file = 0 | |
302 | else: |
|
298 | else: | |
303 | url_specified = False |
|
299 | url_specified = False | |
304 |
|
300 | |||
305 | if self.wait_for_url_file and not os.path.exists(self.url_file): |
|
301 | if self.wait_for_url_file and not os.path.exists(self.url_file): | |
306 | self.log.warn("url_file %r not found", self.url_file) |
|
302 | self.log.warn("url_file %r not found", self.url_file) | |
307 | self.log.warn("Waiting up to %.1f seconds for it to arrive.", self.wait_for_url_file) |
|
303 | self.log.warn("Waiting up to %.1f seconds for it to arrive.", self.wait_for_url_file) | |
308 | tic = time.time() |
|
304 | tic = time.time() | |
309 | while not os.path.exists(self.url_file) and (time.time()-tic < self.wait_for_url_file): |
|
305 | while not os.path.exists(self.url_file) and (time.time()-tic < self.wait_for_url_file): | |
310 | # wait for url_file to exist, or until time limit |
|
306 | # wait for url_file to exist, or until time limit | |
311 | time.sleep(0.1) |
|
307 | time.sleep(0.1) | |
312 |
|
308 | |||
313 | if os.path.exists(self.url_file): |
|
309 | if os.path.exists(self.url_file): | |
314 | self.load_connector_file() |
|
310 | self.load_connector_file() | |
315 | elif not url_specified: |
|
311 | elif not url_specified: | |
316 | self.log.fatal("Fatal: url file never arrived: %s", self.url_file) |
|
312 | self.log.fatal("Fatal: url file never arrived: %s", self.url_file) | |
317 | self.exit(1) |
|
313 | self.exit(1) | |
318 |
|
314 | |||
319 |
|
315 | |||
320 | try: |
|
316 | try: | |
321 | exec_lines = config.IPKernelApp.exec_lines |
|
317 | exec_lines = config.IPKernelApp.exec_lines | |
322 | except AttributeError: |
|
318 | except AttributeError: | |
323 | try: |
|
319 | try: | |
324 | exec_lines = config.InteractiveShellApp.exec_lines |
|
320 | exec_lines = config.InteractiveShellApp.exec_lines | |
325 | except AttributeError: |
|
321 | except AttributeError: | |
326 | exec_lines = config.IPKernelApp.exec_lines = [] |
|
322 | exec_lines = config.IPKernelApp.exec_lines = [] | |
327 | try: |
|
323 | try: | |
328 | exec_files = config.IPKernelApp.exec_files |
|
324 | exec_files = config.IPKernelApp.exec_files | |
329 | except AttributeError: |
|
325 | except AttributeError: | |
330 | try: |
|
326 | try: | |
331 | exec_files = config.InteractiveShellApp.exec_files |
|
327 | exec_files = config.InteractiveShellApp.exec_files | |
332 | except AttributeError: |
|
328 | except AttributeError: | |
333 | exec_files = config.IPKernelApp.exec_files = [] |
|
329 | exec_files = config.IPKernelApp.exec_files = [] | |
334 |
|
330 | |||
335 | if self.startup_script: |
|
331 | if self.startup_script: | |
336 | exec_files.append(self.startup_script) |
|
332 | exec_files.append(self.startup_script) | |
337 | if self.startup_command: |
|
333 | if self.startup_command: | |
338 | exec_lines.append(self.startup_command) |
|
334 | exec_lines.append(self.startup_command) | |
339 |
|
335 | |||
340 | # Create the underlying shell class and Engine |
|
336 | # Create the underlying shell class and Engine | |
341 | # shell_class = import_item(self.master_config.Global.shell_class) |
|
337 | # shell_class = import_item(self.master_config.Global.shell_class) | |
342 | # print self.config |
|
338 | # print self.config | |
343 | try: |
|
339 | try: | |
344 | self.engine = EngineFactory(config=config, log=self.log, |
|
340 | self.engine = EngineFactory(config=config, log=self.log, | |
345 | connection_info=self.connection_info, |
|
341 | connection_info=self.connection_info, | |
346 | ) |
|
342 | ) | |
347 | except: |
|
343 | except: | |
348 | self.log.error("Couldn't start the Engine", exc_info=True) |
|
344 | self.log.error("Couldn't start the Engine", exc_info=True) | |
349 | self.exit(1) |
|
345 | self.exit(1) | |
350 |
|
346 | |||
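
init_engine() above waits for the controller's connection file with a poll-and-timeout loop. The same pattern, pulled out as a standalone helper (names and defaults are illustrative, not part of this changeset):

import os
import time

def wait_for_file(path, timeout=5.0, poll=0.1):
    """Return True once `path` exists, or False after `timeout` seconds."""
    tic = time.time()
    while not os.path.exists(path) and (time.time() - tic) < timeout:
        time.sleep(poll)
    return os.path.exists(path)

print(wait_for_file('/tmp/ipcontroller-engine.json', timeout=0.5))
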
351 | def forward_logging(self): |
|
347 | def forward_logging(self): | |
352 | if self.log_url: |
|
348 | if self.log_url: | |
353 | self.log.info("Forwarding logging to %s", self.log_url) |
|
349 | self.log.info("Forwarding logging to %s", self.log_url) | |
354 | context = self.engine.context |
|
350 | context = self.engine.context | |
355 | lsock = context.socket(zmq.PUB) |
|
351 | lsock = context.socket(zmq.PUB) | |
356 | lsock.connect(self.log_url) |
|
352 | lsock.connect(self.log_url) | |
357 | handler = EnginePUBHandler(self.engine, lsock) |
|
353 | handler = EnginePUBHandler(self.engine, lsock) | |
358 | handler.setLevel(self.log_level) |
|
354 | handler.setLevel(self.log_level) | |
359 | self.log.addHandler(handler) |
|
355 | self.log.addHandler(handler) | |
360 |
|
356 | |||
361 | def init_mpi(self): |
|
357 | def init_mpi(self): | |
362 | global mpi |
|
358 | global mpi | |
363 | self.mpi = MPI(parent=self) |
|
359 | self.mpi = MPI(parent=self) | |
364 |
|
360 | |||
365 | mpi_import_statement = self.mpi.init_script |
|
361 | mpi_import_statement = self.mpi.init_script | |
366 | if mpi_import_statement: |
|
362 | if mpi_import_statement: | |
367 | try: |
|
363 | try: | |
368 | self.log.info("Initializing MPI:") |
|
364 | self.log.info("Initializing MPI:") | |
369 | self.log.info(mpi_import_statement) |
|
365 | self.log.info(mpi_import_statement) | |
370 | exec mpi_import_statement in globals() |
|
366 | exec mpi_import_statement in globals() | |
371 | except: |
|
367 | except: | |
372 | mpi = None |
|
368 | mpi = None | |
373 | else: |
|
369 | else: | |
374 | mpi = None |
|
370 | mpi = None | |
375 |
|
371 | |||
376 | @catch_config_error |
|
372 | @catch_config_error | |
377 | def initialize(self, argv=None): |
|
373 | def initialize(self, argv=None): | |
378 | super(IPEngineApp, self).initialize(argv) |
|
374 | super(IPEngineApp, self).initialize(argv) | |
379 | self.init_mpi() |
|
375 | self.init_mpi() | |
380 | self.init_engine() |
|
376 | self.init_engine() | |
381 | self.forward_logging() |
|
377 | self.forward_logging() | |
382 |
|
378 | |||
383 | def start(self): |
|
379 | def start(self): | |
384 | self.engine.start() |
|
380 | self.engine.start() | |
385 | try: |
|
381 | try: | |
386 | self.engine.loop.start() |
|
382 | self.engine.loop.start() | |
387 | except KeyboardInterrupt: |
|
383 | except KeyboardInterrupt: | |
388 | self.log.critical("Engine Interrupted, shutting down...\n") |
|
384 | self.log.critical("Engine Interrupted, shutting down...\n") | |
389 |
|
385 | |||
390 |
|
386 | |||
391 | launch_new_instance = IPEngineApp.launch_instance |
|
387 | launch_new_instance = IPEngineApp.launch_instance | |
392 |
|
388 | |||
393 |
|
389 | |||
394 | if __name__ == '__main__': |
|
390 | if __name__ == '__main__': | |
395 | launch_new_instance() |
|
391 | launch_new_instance() | |
396 |
|
392 |
@@ -1,99 +1,95 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 | """ |
|
3 | """ | |
4 | A simple IPython logger application |
|
4 | A simple IPython logger application | |
5 |
|
5 | |||
6 | Authors: |
|
6 | Authors: | |
7 |
|
7 | |||
8 | * MinRK |
|
8 | * MinRK | |
9 |
|
9 | |||
10 | """ |
|
10 | """ | |
11 |
|
11 | |||
12 | #----------------------------------------------------------------------------- |
|
12 | #----------------------------------------------------------------------------- | |
13 | # Copyright (C) 2011 The IPython Development Team |
|
13 | # Copyright (C) 2011 The IPython Development Team | |
14 | # |
|
14 | # | |
15 | # Distributed under the terms of the BSD License. The full license is in |
|
15 | # Distributed under the terms of the BSD License. The full license is in | |
16 | # the file COPYING, distributed as part of this software. |
|
16 | # the file COPYING, distributed as part of this software. | |
17 | #----------------------------------------------------------------------------- |
|
17 | #----------------------------------------------------------------------------- | |
18 |
|
18 | |||
19 | #----------------------------------------------------------------------------- |
|
19 | #----------------------------------------------------------------------------- | |
20 | # Imports |
|
20 | # Imports | |
21 | #----------------------------------------------------------------------------- |
|
21 | #----------------------------------------------------------------------------- | |
22 |
|
22 | |||
23 | import os |
|
23 | import os | |
24 | import sys |
|
24 | import sys | |
25 |
|
25 | |||
26 | import zmq |
|
26 | import zmq | |
27 |
|
27 | |||
28 | from IPython.core.profiledir import ProfileDir |
|
28 | from IPython.core.profiledir import ProfileDir | |
29 | from IPython.utils.traitlets import Bool, Dict, Unicode |
|
29 | from IPython.utils.traitlets import Bool, Dict, Unicode | |
30 |
|
30 | |||
31 | from IPython.parallel.apps.baseapp import ( |
|
31 | from IPython.parallel.apps.baseapp import ( | |
32 | BaseParallelApplication, |
|
32 | BaseParallelApplication, | |
33 | base_aliases, |
|
33 | base_aliases, | |
34 | catch_config_error, |
|
34 | catch_config_error, | |
35 | ) |
|
35 | ) | |
36 | from IPython.parallel.apps.logwatcher import LogWatcher |
|
36 | from IPython.parallel.apps.logwatcher import LogWatcher | |
37 |
|
37 | |||
38 | #----------------------------------------------------------------------------- |
|
38 | #----------------------------------------------------------------------------- | |
39 | # Module level variables |
|
39 | # Module level variables | |
40 | #----------------------------------------------------------------------------- |
|
40 | #----------------------------------------------------------------------------- | |
41 |
|
41 | |||
42 | #: The default config file name for this application |
|
42 | #: The default config file name for this application | |
43 | default_config_file_name = u'iplogger_config.py' |
|
|||
44 |
|
||||
45 | _description = """Start an IPython logger for parallel computing. |
|
43 | _description = """Start an IPython logger for parallel computing. | |
46 |
|
44 | |||
47 | IPython controllers and engines (and your own processes) can broadcast log messages |
|
45 | IPython controllers and engines (and your own processes) can broadcast log messages | |
48 | by registering a `zmq.log.handlers.PUBHandler` with the `logging` module. The |
|
46 | by registering a `zmq.log.handlers.PUBHandler` with the `logging` module. The | |
49 | logger can be configured using command line options or using a cluster |
|
47 | logger can be configured using command line options or using a cluster | |
50 | directory. Cluster directories contain config, log and security files and are |
|
48 | directory. Cluster directories contain config, log and security files and are | |
51 | usually located in your ipython directory and named as "profile_name". |
|
49 | usually located in your ipython directory and named as "profile_name". | |
52 | See the `profile` and `profile-dir` options for details. |
|
50 | See the `profile` and `profile-dir` options for details. | |
53 | """ |
|
51 | """ | |
54 |
|
52 | |||
55 |
|
53 | |||
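
The description above notes that any process can broadcast to this logger by registering a zmq.log.handlers.PUBHandler with the logging module. A minimal sketch of the sending side (the URL is a placeholder and must match whatever the logger is listening on):

import logging

import zmq
from zmq.log.handlers import PUBHandler

ctx = zmq.Context.instance()
sock = ctx.socket(zmq.PUB)
sock.connect('tcp://127.0.0.1:20202')   # placeholder: the iplogger url

logging.getLogger().addHandler(PUBHandler(sock))
logging.getLogger().setLevel(logging.INFO)
logging.info("hello from a worker process")
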
56 | #----------------------------------------------------------------------------- |
|
54 | #----------------------------------------------------------------------------- | |
57 | # Main application |
|
55 | # Main application | |
58 | #----------------------------------------------------------------------------- |
|
56 | #----------------------------------------------------------------------------- | |
59 | aliases = {} |
|
57 | aliases = {} | |
60 | aliases.update(base_aliases) |
|
58 | aliases.update(base_aliases) | |
61 | aliases.update(dict(url='LogWatcher.url', topics='LogWatcher.topics')) |
|
59 | aliases.update(dict(url='LogWatcher.url', topics='LogWatcher.topics')) | |
62 |
|
60 | |||
63 | class IPLoggerApp(BaseParallelApplication): |
|
61 | class IPLoggerApp(BaseParallelApplication): | |
64 |
|
62 | |||
65 | name = u'iplogger' |
|
63 | name = u'iplogger' | |
66 | description = _description |
|
64 | description = _description | |
67 | config_file_name = Unicode(default_config_file_name) |
|
|||
68 |
|
||||
69 | classes = [LogWatcher, ProfileDir] |
|
65 | classes = [LogWatcher, ProfileDir] | |
70 | aliases = Dict(aliases) |
|
66 | aliases = Dict(aliases) | |
71 |
|
67 | |||
72 | @catch_config_error |
|
68 | @catch_config_error | |
73 | def initialize(self, argv=None): |
|
69 | def initialize(self, argv=None): | |
74 | super(IPLoggerApp, self).initialize(argv) |
|
70 | super(IPLoggerApp, self).initialize(argv) | |
75 | self.init_watcher() |
|
71 | self.init_watcher() | |
76 |
|
72 | |||
77 | def init_watcher(self): |
|
73 | def init_watcher(self): | |
78 | try: |
|
74 | try: | |
79 | self.watcher = LogWatcher(parent=self, log=self.log) |
|
75 | self.watcher = LogWatcher(parent=self, log=self.log) | |
80 | except: |
|
76 | except: | |
81 | self.log.error("Couldn't start the LogWatcher", exc_info=True) |
|
77 | self.log.error("Couldn't start the LogWatcher", exc_info=True) | |
82 | self.exit(1) |
|
78 | self.exit(1) | |
83 | self.log.info("Listening for log messages on %r"%self.watcher.url) |
|
79 | self.log.info("Listening for log messages on %r"%self.watcher.url) | |
84 |
|
80 | |||
85 |
|
81 | |||
86 | def start(self): |
|
82 | def start(self): | |
87 | self.watcher.start() |
|
83 | self.watcher.start() | |
88 | try: |
|
84 | try: | |
89 | self.watcher.loop.start() |
|
85 | self.watcher.loop.start() | |
90 | except KeyboardInterrupt: |
|
86 | except KeyboardInterrupt: | |
91 | self.log.critical("Logging Interrupted, shutting down...\n") |
|
87 | self.log.critical("Logging Interrupted, shutting down...\n") | |
92 |
|
88 | |||
93 |
|
89 | |||
94 | launch_new_instance = IPLoggerApp.launch_instance |
|
90 | launch_new_instance = IPLoggerApp.launch_instance | |
95 |
|
91 | |||
96 |
|
92 | |||
97 | if __name__ == '__main__': |
|
93 | if __name__ == '__main__': | |
98 | launch_new_instance() |
|
94 | launch_new_instance() | |
99 |
|
95 |
@@ -1,396 +1,392 | |||||
1 | #!/usr/bin/env python |
|
1 | #!/usr/bin/env python | |
2 | # encoding: utf-8 |
|
2 | # encoding: utf-8 | |
3 | """ |
|
3 | """ | |
4 | The :class:`~IPython.core.application.Application` object for the command |
|
4 | The :class:`~IPython.core.application.Application` object for the command | |
5 | line :command:`ipython` program. |
|
5 | line :command:`ipython` program. | |
6 |
|
6 | |||
7 | Authors |
|
7 | Authors | |
8 | ------- |
|
8 | ------- | |
9 |
|
9 | |||
10 | * Brian Granger |
|
10 | * Brian Granger | |
11 | * Fernando Perez |
|
11 | * Fernando Perez | |
12 | * Min Ragan-Kelley |
|
12 | * Min Ragan-Kelley | |
13 | """ |
|
13 | """ | |
14 |
|
14 | |||
15 | #----------------------------------------------------------------------------- |
|
15 | #----------------------------------------------------------------------------- | |
16 | # Copyright (C) 2008-2011 The IPython Development Team |
|
16 | # Copyright (C) 2008-2011 The IPython Development Team | |
17 | # |
|
17 | # | |
18 | # Distributed under the terms of the BSD License. The full license is in |
|
18 | # Distributed under the terms of the BSD License. The full license is in | |
19 | # the file COPYING, distributed as part of this software. |
|
19 | # the file COPYING, distributed as part of this software. | |
20 | #----------------------------------------------------------------------------- |
|
20 | #----------------------------------------------------------------------------- | |
21 |
|
21 | |||
22 | #----------------------------------------------------------------------------- |
|
22 | #----------------------------------------------------------------------------- | |
23 | # Imports |
|
23 | # Imports | |
24 | #----------------------------------------------------------------------------- |
|
24 | #----------------------------------------------------------------------------- | |
25 |
|
25 | |||
26 | from __future__ import absolute_import |
|
26 | from __future__ import absolute_import | |
27 |
|
27 | |||
28 | import logging |
|
28 | import logging | |
29 | import os |
|
29 | import os | |
30 | import sys |
|
30 | import sys | |
31 |
|
31 | |||
32 | from IPython.config.loader import ( |
|
32 | from IPython.config.loader import ( | |
33 | Config, PyFileConfigLoader, ConfigFileNotFound |
|
33 | Config, PyFileConfigLoader, ConfigFileNotFound | |
34 | ) |
|
34 | ) | |
35 | from IPython.config.application import boolean_flag, catch_config_error |
|
35 | from IPython.config.application import boolean_flag, catch_config_error | |
36 | from IPython.core import release |
|
36 | from IPython.core import release | |
37 | from IPython.core import usage |
|
37 | from IPython.core import usage | |
38 | from IPython.core.completer import IPCompleter |
|
38 | from IPython.core.completer import IPCompleter | |
39 | from IPython.core.crashhandler import CrashHandler |
|
39 | from IPython.core.crashhandler import CrashHandler | |
40 | from IPython.core.formatters import PlainTextFormatter |
|
40 | from IPython.core.formatters import PlainTextFormatter | |
41 | from IPython.core.history import HistoryManager |
|
41 | from IPython.core.history import HistoryManager | |
42 | from IPython.core.prompts import PromptManager |
|
42 | from IPython.core.prompts import PromptManager | |
43 | from IPython.core.application import ( |
|
43 | from IPython.core.application import ( | |
44 | ProfileDir, BaseIPythonApplication, base_flags, base_aliases |
|
44 | ProfileDir, BaseIPythonApplication, base_flags, base_aliases | |
45 | ) |
|
45 | ) | |
46 | from IPython.core.magics import ScriptMagics |
|
46 | from IPython.core.magics import ScriptMagics | |
47 | from IPython.core.shellapp import ( |
|
47 | from IPython.core.shellapp import ( | |
48 | InteractiveShellApp, shell_flags, shell_aliases |
|
48 | InteractiveShellApp, shell_flags, shell_aliases | |
49 | ) |
|
49 | ) | |
50 | from IPython.terminal.interactiveshell import TerminalInteractiveShell |
|
50 | from IPython.terminal.interactiveshell import TerminalInteractiveShell | |
51 | from IPython.utils import warn |
|
51 | from IPython.utils import warn | |
52 | from IPython.utils.path import get_ipython_dir, check_for_old_config |
|
52 | from IPython.utils.path import get_ipython_dir, check_for_old_config | |
53 | from IPython.utils.traitlets import ( |
|
53 | from IPython.utils.traitlets import ( | |
54 | Bool, List, Dict, |
|
54 | Bool, List, Dict, | |
55 | ) |
|
55 | ) | |
56 |
|
56 | |||
57 | #----------------------------------------------------------------------------- |
|
57 | #----------------------------------------------------------------------------- | |
58 | # Globals, utilities and helpers |
|
58 | # Globals, utilities and helpers | |
59 | #----------------------------------------------------------------------------- |
|
59 | #----------------------------------------------------------------------------- | |
60 |
|
60 | |||
61 | #: The default config file name for this application. |
|
|||
62 | default_config_file_name = u'ipython_config.py' |
|
|||
63 |
|
||||
64 | _examples = """ |
|
61 | _examples = """ | |
65 | ipython --pylab # start in pylab mode |
|
62 | ipython --pylab # start in pylab mode | |
66 | ipython --pylab=qt # start in pylab mode with the qt4 backend |
|
63 | ipython --pylab=qt # start in pylab mode with the qt4 backend | |
67 | ipython --log-level=DEBUG # set logging to DEBUG |
|
64 | ipython --log-level=DEBUG # set logging to DEBUG | |
68 | ipython --profile=foo # start with profile foo |
|
65 | ipython --profile=foo # start with profile foo | |
69 |
|
66 | |||
70 | ipython qtconsole # start the qtconsole GUI application |
|
67 | ipython qtconsole # start the qtconsole GUI application | |
71 | ipython help qtconsole # show the help for the qtconsole subcmd |
|
68 | ipython help qtconsole # show the help for the qtconsole subcmd | |
72 |
|
69 | |||
73 | ipython console # start the terminal-based console application |
|
70 | ipython console # start the terminal-based console application | |
74 | ipython help console # show the help for the console subcmd |
|
71 | ipython help console # show the help for the console subcmd | |
75 |
|
72 | |||
76 | ipython notebook # start the IPython notebook |
|
73 | ipython notebook # start the IPython notebook | |
77 | ipython help notebook # show the help for the notebook subcmd |
|
74 | ipython help notebook # show the help for the notebook subcmd | |
78 |
|
75 | |||
79 | ipython profile create foo # create profile foo w/ default config files |
|
76 | ipython profile create foo # create profile foo w/ default config files | |
80 | ipython help profile # show the help for the profile subcmd |
|
77 | ipython help profile # show the help for the profile subcmd | |
81 |
|
78 | |||
82 | ipython locate # print the path to the IPython directory |
|
79 | ipython locate # print the path to the IPython directory | |
83 | ipython locate profile foo # print the path to the directory for profile `foo` |
|
80 | ipython locate profile foo # print the path to the directory for profile `foo` | |
84 |
|
81 | |||
85 | ipython nbconvert # convert notebooks to/from other formats |
|
82 | ipython nbconvert # convert notebooks to/from other formats | |
86 | """ |
|
83 | """ | |
87 |
|
84 | |||
88 | #----------------------------------------------------------------------------- |
|
85 | #----------------------------------------------------------------------------- | |
89 | # Crash handler for this application |
|
86 | # Crash handler for this application | |
90 | #----------------------------------------------------------------------------- |
|
87 | #----------------------------------------------------------------------------- | |
91 |
|
88 | |||
92 | class IPAppCrashHandler(CrashHandler): |
|
89 | class IPAppCrashHandler(CrashHandler): | |
93 | """sys.excepthook for IPython itself, leaves a detailed report on disk.""" |
|
90 | """sys.excepthook for IPython itself, leaves a detailed report on disk.""" | |
94 |
|
91 | |||
95 | def __init__(self, app): |
|
92 | def __init__(self, app): | |
96 | contact_name = release.author |
|
93 | contact_name = release.author | |
97 | contact_email = release.author_email |
|
94 | contact_email = release.author_email | |
98 | bug_tracker = 'https://github.com/ipython/ipython/issues' |
|
95 | bug_tracker = 'https://github.com/ipython/ipython/issues' | |
99 | super(IPAppCrashHandler,self).__init__( |
|
96 | super(IPAppCrashHandler,self).__init__( | |
100 | app, contact_name, contact_email, bug_tracker |
|
97 | app, contact_name, contact_email, bug_tracker | |
101 | ) |
|
98 | ) | |
102 |
|
99 | |||
103 | def make_report(self,traceback): |
|
100 | def make_report(self,traceback): | |
104 | """Return a string containing a crash report.""" |
|
101 | """Return a string containing a crash report.""" | |
105 |
|
102 | |||
106 | sec_sep = self.section_sep |
|
103 | sec_sep = self.section_sep | |
107 | # Start with parent report |
|
104 | # Start with parent report | |
108 | report = [super(IPAppCrashHandler, self).make_report(traceback)] |
|
105 | report = [super(IPAppCrashHandler, self).make_report(traceback)] | |
109 | # Add interactive-specific info we may have |
|
106 | # Add interactive-specific info we may have | |
110 | rpt_add = report.append |
|
107 | rpt_add = report.append | |
111 | try: |
|
108 | try: | |
112 | rpt_add(sec_sep+"History of session input:") |
|
109 | rpt_add(sec_sep+"History of session input:") | |
113 | for line in self.app.shell.user_ns['_ih']: |
|
110 | for line in self.app.shell.user_ns['_ih']: | |
114 | rpt_add(line) |
|
111 | rpt_add(line) | |
115 | rpt_add('\n*** Last line of input (may not be in above history):\n') |
|
112 | rpt_add('\n*** Last line of input (may not be in above history):\n') | |
116 | rpt_add(self.app.shell._last_input_line+'\n') |
|
113 | rpt_add(self.app.shell._last_input_line+'\n') | |
117 | except: |
|
114 | except: | |
118 | pass |
|
115 | pass | |
119 |
|
116 | |||
120 | return ''.join(report) |
|
117 | return ''.join(report) | |
121 |
|
118 | |||
122 | #----------------------------------------------------------------------------- |
|
119 | #----------------------------------------------------------------------------- | |
123 | # Aliases and Flags |
|
120 | # Aliases and Flags | |
124 | #----------------------------------------------------------------------------- |
|
121 | #----------------------------------------------------------------------------- | |
125 | flags = dict(base_flags) |
|
122 | flags = dict(base_flags) | |
126 | flags.update(shell_flags) |
|
123 | flags.update(shell_flags) | |
127 | frontend_flags = {} |
|
124 | frontend_flags = {} | |
128 | addflag = lambda *args: frontend_flags.update(boolean_flag(*args)) |
|
125 | addflag = lambda *args: frontend_flags.update(boolean_flag(*args)) | |
129 | addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax', |
|
126 | addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax', | |
130 | 'Turn on auto editing of files with syntax errors.', |
|
127 | 'Turn on auto editing of files with syntax errors.', | |
131 | 'Turn off auto editing of files with syntax errors.' |
|
128 | 'Turn off auto editing of files with syntax errors.' | |
132 | ) |
|
129 | ) | |
133 | addflag('banner', 'TerminalIPythonApp.display_banner', |
|
130 | addflag('banner', 'TerminalIPythonApp.display_banner', | |
134 | "Display a banner upon starting IPython.", |
|
131 | "Display a banner upon starting IPython.", | |
135 | "Don't display a banner upon starting IPython." |
|
132 | "Don't display a banner upon starting IPython." | |
136 | ) |
|
133 | ) | |
137 | addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit', |
|
134 | addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit', | |
138 | """Set to confirm when you try to exit IPython with an EOF (Control-D |
|
135 | """Set to confirm when you try to exit IPython with an EOF (Control-D | |
139 | in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit', |
|
136 | in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit', | |
140 | you can force a direct exit without any confirmation.""", |
|
137 | you can force a direct exit without any confirmation.""", | |
141 | "Don't prompt the user when exiting." |
|
138 | "Don't prompt the user when exiting." | |
142 | ) |
|
139 | ) | |
143 | addflag('term-title', 'TerminalInteractiveShell.term_title', |
|
140 | addflag('term-title', 'TerminalInteractiveShell.term_title', | |
144 | "Enable auto setting the terminal title.", |
|
141 | "Enable auto setting the terminal title.", | |
145 | "Disable auto setting the terminal title." |
|
142 | "Disable auto setting the terminal title." | |
146 | ) |
|
143 | ) | |
147 | classic_config = Config() |
|
144 | classic_config = Config() | |
148 | classic_config.InteractiveShell.cache_size = 0 |
|
145 | classic_config.InteractiveShell.cache_size = 0 | |
149 | classic_config.PlainTextFormatter.pprint = False |
|
146 | classic_config.PlainTextFormatter.pprint = False | |
150 | classic_config.PromptManager.in_template = '>>> ' |
|
147 | classic_config.PromptManager.in_template = '>>> ' | |
151 | classic_config.PromptManager.in2_template = '... ' |
|
148 | classic_config.PromptManager.in2_template = '... ' | |
152 | classic_config.PromptManager.out_template = '' |
|
149 | classic_config.PromptManager.out_template = '' | |
153 | classic_config.InteractiveShell.separate_in = '' |
|
150 | classic_config.InteractiveShell.separate_in = '' | |
154 | classic_config.InteractiveShell.separate_out = '' |
|
151 | classic_config.InteractiveShell.separate_out = '' | |
155 | classic_config.InteractiveShell.separate_out2 = '' |
|
152 | classic_config.InteractiveShell.separate_out2 = '' | |
156 | classic_config.InteractiveShell.colors = 'NoColor' |
|
153 | classic_config.InteractiveShell.colors = 'NoColor' | |
157 | classic_config.InteractiveShell.xmode = 'Plain' |
|
154 | classic_config.InteractiveShell.xmode = 'Plain' | |
158 |
|
155 | |||
159 | frontend_flags['classic']=( |
|
156 | frontend_flags['classic']=( | |
160 | classic_config, |
|
157 | classic_config, | |
161 | "Gives IPython a similar feel to the classic Python prompt." |
|
158 | "Gives IPython a similar feel to the classic Python prompt." | |
162 | ) |
|
159 | ) | |
163 | # # log doesn't make so much sense this way anymore |
|
160 | # # log doesn't make so much sense this way anymore | |
164 | # paa('--log','-l', |
|
161 | # paa('--log','-l', | |
165 | # action='store_true', dest='InteractiveShell.logstart', |
|
162 | # action='store_true', dest='InteractiveShell.logstart', | |
166 | # help="Start logging to the default log file (./ipython_log.py).") |
|
163 | # help="Start logging to the default log file (./ipython_log.py).") | |
167 | # |
|
164 | # | |
168 | # # quick is harder to implement |
|
165 | # # quick is harder to implement | |
169 | frontend_flags['quick']=( |
|
166 | frontend_flags['quick']=( | |
170 | {'TerminalIPythonApp' : {'quick' : True}}, |
|
167 | {'TerminalIPythonApp' : {'quick' : True}}, | |
171 | "Enable quick startup with no config files." |
|
168 | "Enable quick startup with no config files." | |
172 | ) |
|
169 | ) | |
173 |
|
170 | |||
174 | frontend_flags['i'] = ( |
|
171 | frontend_flags['i'] = ( | |
175 | {'TerminalIPythonApp' : {'force_interact' : True}}, |
|
172 | {'TerminalIPythonApp' : {'force_interact' : True}}, | |
176 | """If running code from the command line, become interactive afterwards. |
|
173 | """If running code from the command line, become interactive afterwards. | |
177 | Note: can also be given simply as '-i'.""" |
|
174 | Note: can also be given simply as '-i'.""" | |
178 | ) |
|
175 | ) | |
179 | flags.update(frontend_flags) |
|
176 | flags.update(frontend_flags) | |
180 |
|
177 | |||
181 | aliases = dict(base_aliases) |
|
178 | aliases = dict(base_aliases) | |
182 | aliases.update(shell_aliases) |
|
179 | aliases.update(shell_aliases) | |
183 |
|
180 | |||
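
Each addflag() call above wraps IPython's boolean_flag() helper, which turns a single Bool trait into a matched pair of --name / --no-name command-line flags. A short sketch of what one of those calls produces, assuming the boolean_flag import path used elsewhere in this changeset:

from IPython.config.application import boolean_flag

flags = {}
flags.update(boolean_flag(
    'banner', 'TerminalIPythonApp.display_banner',
    "Display a banner upon starting IPython.",
    "Don't display a banner upon starting IPython.",
))
# both the setting and the negating flag are registered, e.g. 'banner' and 'no-banner'
print(sorted(flags))
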
184 | #----------------------------------------------------------------------------- |
|
181 | #----------------------------------------------------------------------------- | |
185 | # Main classes and functions |
|
182 | # Main classes and functions | |
186 | #----------------------------------------------------------------------------- |
|
183 | #----------------------------------------------------------------------------- | |
187 |
|
184 | |||
188 |
|
185 | |||
189 | class LocateIPythonApp(BaseIPythonApplication): |
|
186 | class LocateIPythonApp(BaseIPythonApplication): | |
190 | description = """print the path to the IPython dir""" |
|
187 | description = """print the path to the IPython dir""" | |
191 | subcommands = Dict(dict( |
|
188 | subcommands = Dict(dict( | |
192 | profile=('IPython.core.profileapp.ProfileLocate', |
|
189 | profile=('IPython.core.profileapp.ProfileLocate', | |
193 | "print the path to an IPython profile directory", |
|
190 | "print the path to an IPython profile directory", | |
194 | ), |
|
191 | ), | |
195 | )) |
|
192 | )) | |
196 | def start(self): |
|
193 | def start(self): | |
197 | if self.subapp is not None: |
|
194 | if self.subapp is not None: | |
198 | return self.subapp.start() |
|
195 | return self.subapp.start() | |
199 | else: |
|
196 | else: | |
200 | print self.ipython_dir |
|
197 | print self.ipython_dir | |
201 |
|
198 | |||
202 |
|
199 | |||
203 | class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): |
|
200 | class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp): | |
204 | name = u'ipython' |
|
201 | name = u'ipython' | |
205 | description = usage.cl_usage |
|
202 | description = usage.cl_usage | |
206 | default_config_file_name = default_config_file_name |
|
|||
207 | crash_handler_class = IPAppCrashHandler |
|
203 | crash_handler_class = IPAppCrashHandler | |
208 | examples = _examples |
|
204 | examples = _examples | |
209 |
|
205 | |||
210 | flags = Dict(flags) |
|
206 | flags = Dict(flags) | |
211 | aliases = Dict(aliases) |
|
207 | aliases = Dict(aliases) | |
212 | classes = List() |
|
208 | classes = List() | |
213 | def _classes_default(self): |
|
209 | def _classes_default(self): | |
214 | """This has to be in a method, for TerminalIPythonApp to be available.""" |
|
210 | """This has to be in a method, for TerminalIPythonApp to be available.""" | |
215 | return [ |
|
211 | return [ | |
216 | InteractiveShellApp, # ShellApp comes before TerminalApp, because |
|
212 | InteractiveShellApp, # ShellApp comes before TerminalApp, because | |
217 | self.__class__, # it will also affect subclasses (e.g. QtConsole) |
|
213 | self.__class__, # it will also affect subclasses (e.g. QtConsole) | |
218 | TerminalInteractiveShell, |
|
214 | TerminalInteractiveShell, | |
219 | PromptManager, |
|
215 | PromptManager, | |
220 | HistoryManager, |
|
216 | HistoryManager, | |
221 | ProfileDir, |
|
217 | ProfileDir, | |
222 | PlainTextFormatter, |
|
218 | PlainTextFormatter, | |
223 | IPCompleter, |
|
219 | IPCompleter, | |
224 | ScriptMagics, |
|
220 | ScriptMagics, | |
225 | ] |
|
221 | ] | |
226 |
|
222 | |||
227 | subcommands = Dict(dict( |
|
223 | subcommands = Dict(dict( | |
228 | qtconsole=('IPython.qt.console.qtconsoleapp.IPythonQtConsoleApp', |
|
224 | qtconsole=('IPython.qt.console.qtconsoleapp.IPythonQtConsoleApp', | |
229 | """Launch the IPython Qt Console.""" |
|
225 | """Launch the IPython Qt Console.""" | |
230 | ), |
|
226 | ), | |
231 | notebook=('IPython.html.notebookapp.NotebookApp', |
|
227 | notebook=('IPython.html.notebookapp.NotebookApp', | |
232 | """Launch the IPython HTML Notebook Server.""" |
|
228 | """Launch the IPython HTML Notebook Server.""" | |
233 | ), |
|
229 | ), | |
234 | profile = ("IPython.core.profileapp.ProfileApp", |
|
230 | profile = ("IPython.core.profileapp.ProfileApp", | |
235 | "Create and manage IPython profiles." |
|
231 | "Create and manage IPython profiles." | |
236 | ), |
|
232 | ), | |
237 | kernel = ("IPython.kernel.zmq.kernelapp.IPKernelApp", |
|
233 | kernel = ("IPython.kernel.zmq.kernelapp.IPKernelApp", | |
238 | "Start a kernel without an attached frontend." |
|
234 | "Start a kernel without an attached frontend." | |
239 | ), |
|
235 | ), | |
240 | console=('IPython.terminal.console.app.ZMQTerminalIPythonApp', |
|
236 | console=('IPython.terminal.console.app.ZMQTerminalIPythonApp', | |
241 | """Launch the IPython terminal-based Console.""" |
|
237 | """Launch the IPython terminal-based Console.""" | |
242 | ), |
|
238 | ), | |
243 | locate=('IPython.terminal.ipapp.LocateIPythonApp', |
|
239 | locate=('IPython.terminal.ipapp.LocateIPythonApp', | |
244 | LocateIPythonApp.description |
|
240 | LocateIPythonApp.description | |
245 | ), |
|
241 | ), | |
246 | history=('IPython.core.historyapp.HistoryApp', |
|
242 | history=('IPython.core.historyapp.HistoryApp', | |
247 | "Manage the IPython history database." |
|
243 | "Manage the IPython history database." | |
248 | ), |
|
244 | ), | |
249 | nbconvert=('IPython.nbconvert.nbconvertapp.NbConvertApp', |
|
245 | nbconvert=('IPython.nbconvert.nbconvertapp.NbConvertApp', | |
250 | "Convert notebooks to/from other formats." |
|
246 | "Convert notebooks to/from other formats." | |
251 | ), |
|
247 | ), | |
252 | )) |
|
248 | )) | |
253 |
|
249 | |||
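
Each subcommands entry above is a pair of (dotted import path, one-line description); the application only resolves the class when that subcommand is actually chosen. A sketch of how such an entry can be resolved by hand, using the import_item helper already imported in this changeset (the entry shown is copied from the table above):

from IPython.utils.importstring import import_item

subcommands = {
    'locate': ('IPython.terminal.ipapp.LocateIPythonApp',
               'print the path to the IPython dir'),
}

path, desc = subcommands['locate']
app_class = import_item(path)   # resolve the dotted name to the actual class
print("%s - %s" % (app_class.__name__, desc))
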
    # *do* autocreate requested profile, but don't create the config file.
    auto_create=Bool(True)
    # configurables
    ignore_old_config=Bool(False, config=True,
        help="Suppress warning messages about legacy config files"
    )
    quick = Bool(False, config=True,
        help="""Start IPython quickly by skipping the loading of config files."""
    )
    def _quick_changed(self, name, old, new):
        if new:
            self.load_config_file = lambda *a, **kw: None
            self.ignore_old_config=True

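The `--quick` flag works entirely through this handler: flipping the trait replaces config-file loading with a no-op and silences the legacy-config warning. A hypothetical check of that behaviour (editor's sketch, not from the changeset):

    app = TerminalIPythonApp()
    app.quick = True              # fires _quick_changed
    app.load_config_file()        # now the no-op lambda, so nothing is loaded
    assert app.ignore_old_config
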
    display_banner = Bool(True, config=True,
        help="Whether to display a banner upon starting IPython."
    )

    # if there is code or files to run from the cmd line, don't interact
    # unless the --i flag (App.force_interact) is true.
    force_interact = Bool(False, config=True,
        help="""If a command or file is given via the command-line,
        e.g. 'ipython foo.py', start an interactive shell after executing the
        file or command."""
    )
    def _force_interact_changed(self, name, old, new):
        if new:
            self.interact = True

    def _file_to_run_changed(self, name, old, new):
        if new:
            self.something_to_run = True
        if new and not self.force_interact:
            self.interact = False
    _code_to_run_changed = _file_to_run_changed
    _module_to_run_changed = _file_to_run_changed

    # internal, not-configurable
    interact=Bool(True)
    something_to_run=Bool(False)

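Together with force_interact above, these handlers implement the rule from the earlier comment: giving IPython something to run turns interactivity off unless --i was passed. A hypothetical walk-through of the trait interplay (editor's sketch, not from the changeset):

    app = TerminalIPythonApp()
    app.file_to_run = 'foo.py'     # fires _file_to_run_changed
    assert app.something_to_run
    assert not app.interact        # would exit after running foo.py
    app.force_interact = True      # what the --i flag sets
    assert app.interact
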
    def parse_command_line(self, argv=None):
        """override to allow old '-pylab' flag with deprecation warning"""

        argv = sys.argv[1:] if argv is None else argv

        if '-pylab' in argv:
            # deprecated `-pylab` given,
            # warn and transform into current syntax
            argv = argv[:] # copy, don't clobber
            idx = argv.index('-pylab')
            warn.warn("`-pylab` flag has been deprecated.\n"
            "    Use `--pylab` instead, or `--pylab=foo` to specify a backend.")
            sub = '--pylab'
            if len(argv) > idx+1:
                # check for gui arg, as in '-pylab qt'
                gui = argv[idx+1]
                if gui in ('wx', 'qt', 'qt4', 'gtk', 'auto'):
                    sub = '--pylab='+gui
                    argv.pop(idx+1)
            argv[idx] = sub

        return super(TerminalIPythonApp, self).parse_command_line(argv)

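Concretely, the rewrite above turns a legacy invocation into the current flag before the regular parser sees it (editor's sketch of the effect, not code from the changeset):

    argv = ['-pylab', 'qt', 'foo.py']
    # after the '-pylab' block runs, the list has been rewritten to
    #   ['--pylab=qt', 'foo.py']
    # and a deprecation warning has been emitted via IPython.utils.warn
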
    @catch_config_error
    def initialize(self, argv=None):
        """Do actions after construct, but before starting the app."""
        super(TerminalIPythonApp, self).initialize(argv)
        if self.subapp is not None:
            # don't bother initializing further, starting subapp
            return
        if not self.ignore_old_config:
            check_for_old_config(self.ipython_dir)
        # print self.extra_args
        if self.extra_args and not self.something_to_run:
            self.file_to_run = self.extra_args[0]
        self.init_path()
        # create the shell
        self.init_shell()
        # and draw the banner
        self.init_banner()
        # Now a variety of things that happen after the banner is printed.
        self.init_gui_pylab()
        self.init_extensions()
        self.init_code()

    def init_shell(self):
        """initialize the InteractiveShell instance"""
        # Create an InteractiveShell instance.
        # shell.display_banner should always be False for the terminal
        # based app, because we call shell.show_banner() by hand below
        # so the banner shows *before* all extension loading stuff.
        self.shell = TerminalInteractiveShell.instance(parent=self,
                        display_banner=False, profile_dir=self.profile_dir,
                        ipython_dir=self.ipython_dir)
        self.shell.configurables.append(self)

    def init_banner(self):
        """optionally display the banner"""
        if self.display_banner and self.interact:
            self.shell.show_banner()
        # Make sure there is a space below the banner.
        if self.log_level <= logging.INFO: print

    def _pylab_changed(self, name, old, new):
        """Replace --pylab='inline' with --pylab='auto'"""
        if new == 'inline':
            warn.warn("'inline' not available as pylab backend, "
                      "using 'auto' instead.")
            self.pylab = 'auto'

    def start(self):
        if self.subapp is not None:
            return self.subapp.start()
        # perform any pre-exec steps:
        if self.interact:
            self.log.debug("Starting IPython's mainloop...")
            self.shell.mainloop()
        else:
            self.log.debug("IPython not interactive...")

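launch_instance (bound at the bottom of the file) boils down to the same two calls; a minimal editor's sketch of driving the app by hand, assuming no extra configuration is wanted:

    app = TerminalIPythonApp.instance()
    app.initialize(argv=[])   # parse argv, load config, build the shell, show the banner
    app.start()               # blocks in the interactive mainloop unless interact is False
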
def load_default_config(ipython_dir=None):
    """Load the default config file from the default ipython_dir.

    This is useful for embedded shells.
    """
    if ipython_dir is None:
        ipython_dir = get_ipython_dir()
    profile_dir = os.path.join(ipython_dir, 'profile_default')

    cl = PyFileConfigLoader("ipython_config.py", profile_dir)
    try:
        config = cl.load_config()
    except ConfigFileNotFound:
        # no config found
        config = Config()
    return config

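As the docstring says, this helper mainly serves embedded shells; a hypothetical use (the embed class and banner text are illustrative, not part of this changeset):

    from IPython.terminal.embed import InteractiveShellEmbed

    config = load_default_config()   # profile_default's ipython_config.py, or an empty Config
    shell = InteractiveShellEmbed(config=config, banner1='embedded IPython')
    # shell()   # calling the instance would open the embedded prompt here
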
launch_new_instance = TerminalIPythonApp.launch_instance


if __name__ == '__main__':
    launch_new_instance()