##// END OF EJS Templates
use launch_new_instance classmethod to launch apps
MinRK -
Show More
@@ -1,752 +1,749 b''
1 1 # coding: utf-8
2 2 """A tornado based IPython notebook server.
3 3
4 4 Authors:
5 5
6 6 * Brian Granger
7 7 """
8 8 #-----------------------------------------------------------------------------
9 9 # Copyright (C) 2013 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 # stdlib
20 20 import errno
21 21 import logging
22 22 import os
23 23 import random
24 24 import select
25 25 import signal
26 26 import socket
27 27 import sys
28 28 import threading
29 29 import time
30 30 import webbrowser
31 31
32 32
33 33 # Third party
34 34 # check for pyzmq 2.1.11
35 35 from IPython.utils.zmqrelated import check_for_zmq
36 36 check_for_zmq('2.1.11', 'IPython.html')
37 37
38 38 from jinja2 import Environment, FileSystemLoader
39 39
40 40 # Install the pyzmq ioloop. This has to be done before anything else from
41 41 # tornado is imported.
42 42 from zmq.eventloop import ioloop
43 43 ioloop.install()
44 44
45 45 # check for tornado 2.1.0
46 46 msg = "The IPython Notebook requires tornado >= 2.1.0"
47 47 try:
48 48 import tornado
49 49 except ImportError:
50 50 raise ImportError(msg)
51 51 try:
52 52 version_info = tornado.version_info
53 53 except AttributeError:
54 54 raise ImportError(msg + ", but you have < 1.1.0")
55 55 if version_info < (2,1,0):
56 56 raise ImportError(msg + ", but you have %s" % tornado.version)
57 57
58 58 from tornado import httpserver
59 59 from tornado import web
60 60
61 61 # Our own libraries
62 62 from IPython.html import DEFAULT_STATIC_FILES_PATH
63 63
64 64 from .services.kernels.kernelmanager import MappingKernelManager
65 65 from .services.notebooks.nbmanager import NotebookManager
66 66 from .services.notebooks.filenbmanager import FileNotebookManager
67 67 from .services.clusters.clustermanager import ClusterManager
68 68
69 69 from .base.handlers import AuthenticatedFileHandler, FileFindHandler
70 70
71 71 from IPython.config.application import catch_config_error, boolean_flag
72 72 from IPython.core.application import BaseIPythonApplication
73 73 from IPython.consoleapp import IPythonConsoleApp
74 74 from IPython.kernel import swallow_argv
75 75 from IPython.kernel.zmq.session import default_secure
76 76 from IPython.kernel.zmq.kernelapp import (
77 77 kernel_flags,
78 78 kernel_aliases,
79 79 )
80 80 from IPython.utils.importstring import import_item
81 81 from IPython.utils.localinterfaces import LOCALHOST
82 82 from IPython.utils import submodule
83 83 from IPython.utils.traitlets import (
84 84 Dict, Unicode, Integer, List, Bool, Bytes,
85 85 DottedObjectName
86 86 )
87 87 from IPython.utils import py3compat
88 88 from IPython.utils.path import filefind
89 89
90 90 from .utils import url_path_join
91 91
92 92 #-----------------------------------------------------------------------------
93 93 # Module globals
94 94 #-----------------------------------------------------------------------------
95 95
# Usage examples shown in `ipython notebook --help` (wired to NotebookApp.examples).
_examples = """
ipython notebook # start the notebook
ipython notebook --profile=sympy # use the sympy profile
ipython notebook --pylab=inline # pylab in inline plotting mode
ipython notebook --certfile=mycert.pem # use SSL/TLS certificate
ipython notebook --port=5555 --ip=* # Listen on port 5555, all interfaces
"""
103 103
104 104 #-----------------------------------------------------------------------------
105 105 # Helper functions
106 106 #-----------------------------------------------------------------------------
107 107
def random_ports(port, n):
    """Generate a list of n random ports near the given port.

    The first 5 ports will be sequential, and the remaining n-5 will be
    randomly selected in the range [port-2*n, port+2*n].
    """
    # Try a handful of consecutive ports first (predictable for users)...
    sequential = min(5, n)
    for offset in range(sequential):
        yield port + offset
    # ...then fall back to random ports in a window around the requested one.
    for _ in range(n - 5):
        yield port + random.randint(-2 * n, 2 * n)
118 118
def load_handlers(name):
    """Load the (URL pattern, handler) tuples for each component."""
    # Resolve the dotted submodule under IPython.html and return its
    # module-level `default_handlers` list.
    module_name = 'IPython.html.' + name
    module = __import__(module_name, fromlist=['default_handlers'])
    return module.default_handlers
124 124
125 125 #-----------------------------------------------------------------------------
126 126 # The Tornado web application
127 127 #-----------------------------------------------------------------------------
128 128
class NotebookWebApplication(web.Application):
    """The tornado web application serving the notebook.

    Ties together the handlers from each sub-component, the service
    managers, and the tornado settings dict built from the owning
    NotebookApp's configuration.
    """

    def __init__(self, ipython_app, kernel_manager, notebook_manager,
                 cluster_manager, log,
                 base_project_url, settings_overrides):
        # Build the settings dict first, since the handler list depends on
        # entries in it (base_project_url, notebook_manager).
        settings = self.init_settings(
            ipython_app, kernel_manager, notebook_manager, cluster_manager,
            log, base_project_url, settings_overrides)
        handlers = self.init_handlers(settings)

        super(NotebookWebApplication, self).__init__(handlers, **settings)

    def init_settings(self, ipython_app, kernel_manager, notebook_manager,
                      cluster_manager, log,
                      base_project_url, settings_overrides):
        """Assemble the tornado application settings dict.

        ``settings_overrides`` (NotebookApp.webapp_settings) is applied last
        and may replace any default entry.
        """
        # Python < 2.6.5 doesn't accept unicode keys in f(**kwargs), and
        # base_project_url will always be unicode, which will in turn
        # make the patterns unicode, and ultimately result in unicode
        # keys in kwargs to handler._execute(**kwargs) in tornado.
        # This enforces that base_project_url be ascii in that situation.
        #
        # Note that the URLs these patterns check against are escaped,
        # and thus guaranteed to be ASCII: 'hΓ©llo' is really 'h%C3%A9llo'.
        base_project_url = py3compat.unicode_to_str(base_project_url, 'ascii')
        template_path = os.path.join(os.path.dirname(__file__), "templates")
        settings = dict(
            # basics
            base_project_url=base_project_url,
            base_kernel_url=ipython_app.base_kernel_url,
            template_path=template_path,
            static_path=ipython_app.static_file_path,
            static_handler_class = FileFindHandler,
            static_url_prefix = url_path_join(base_project_url,'/static/'),

            # authentication
            cookie_secret=ipython_app.cookie_secret,
            login_url=url_path_join(base_project_url,'/login'),
            read_only=ipython_app.read_only,
            password=ipython_app.password,

            # managers
            kernel_manager=kernel_manager,
            notebook_manager=notebook_manager,
            cluster_manager=cluster_manager,

            # IPython stuff
            mathjax_url=ipython_app.mathjax_url,
            max_msg_size=ipython_app.max_msg_size,
            config=ipython_app.config,
            use_less=ipython_app.use_less,
            jinja2_env=Environment(loader=FileSystemLoader(template_path)),
        )

        # allow custom overrides for the tornado web app.
        settings.update(settings_overrides)
        return settings

    def init_handlers(self, settings):
        """Load the (URL pattern, handler) tuples for each component."""
        handlers = []
        handlers.extend(load_handlers('base.handlers'))
        handlers.extend(load_handlers('tree.handlers'))
        handlers.extend(load_handlers('auth.login'))
        handlers.extend(load_handlers('auth.logout'))
        handlers.extend(load_handlers('notebook.handlers'))
        handlers.extend(load_handlers('services.kernels.handlers'))
        handlers.extend(load_handlers('services.notebooks.handlers'))
        handlers.extend(load_handlers('services.clusters.handlers'))
        # Serve raw files from the notebook directory, gated by authentication.
        handlers.extend([
            (r"/files/(.*)", AuthenticatedFileHandler, {'path' : settings['notebook_manager'].notebook_dir}),
        ])
        # prepend base_project_url onto the patterns that we match
        new_handlers = []
        for handler in handlers:
            pattern = url_path_join(settings['base_project_url'], handler[0])
            new_handler = tuple([pattern] + list(handler[1:]))
            new_handlers.append(new_handler)
        return new_handlers
208 208
209 209
210 210
211 211 #-----------------------------------------------------------------------------
212 212 # Aliases and Flags
213 213 #-----------------------------------------------------------------------------
214 214
# Start from the kernel's command-line flags and layer the
# notebook-server-specific ones on top.
flags = dict(kernel_flags)
flags['no-browser']=(
    {'NotebookApp' : {'open_browser' : False}},
    "Don't open the notebook in a browser after startup."
)
flags['no-mathjax']=(
    {'NotebookApp' : {'enable_mathjax' : False}},
    """Disable MathJax

    MathJax is the javascript library IPython uses to render math/LaTeX. It is
    very large, so you may want to disable it if you have a slow internet
    connection, or for offline use of the notebook.

    When disabled, equations etc. will appear as their untransformed TeX source.
    """
)
flags['read-only'] = (
    {'NotebookApp' : {'read_only' : True}},
    """Allow read-only access to notebooks.

    When using a password to protect the notebook server, this flag
    allows unauthenticated clients to view the notebook list, and
    individual notebooks, but not edit them, start kernels, or run
    code.

    If no password is set, the server will be entirely read-only.
    """
)

# Add notebook manager flags (--script / --no-script pair)
flags.update(boolean_flag('script', 'FileNotebookManager.save_script',
               'Auto-save a .py script everytime the .ipynb notebook is saved',
               'Do not auto-save .py scripts for every notebook'))

# the flags that are specific to the frontend
# these must be scrubbed before being passed to the kernel,
# or it will raise an error on unrecognized flags
notebook_flags = ['no-browser', 'no-mathjax', 'read-only', 'script', 'no-script']

aliases = dict(kernel_aliases)

aliases.update({
    'ip': 'NotebookApp.ip',
    'port': 'NotebookApp.port',
    'port-retries': 'NotebookApp.port_retries',
    'transport': 'KernelManager.transport',
    'keyfile': 'NotebookApp.keyfile',
    'certfile': 'NotebookApp.certfile',
    'notebook-dir': 'NotebookManager.notebook_dir',
    'browser': 'NotebookApp.browser',
})

# remove ipkernel flags that are singletons, and don't make sense in
# multi-kernel environment:
aliases.pop('f', None)

# aliases handled by the frontend; scrubbed from argv before it is
# passed on to the kernel (see NotebookApp.parse_command_line)
notebook_aliases = [u'port', u'port-retries', u'ip', u'keyfile', u'certfile',
                    u'notebook-dir']
273 273
274 274 #-----------------------------------------------------------------------------
275 275 # NotebookApp
276 276 #-----------------------------------------------------------------------------
277 277
class NotebookApp(BaseIPythonApplication):
    """The IPython notebook server application.

    Configures and starts a tornado HTTP(S) server (see init_webapp) serving
    a NotebookWebApplication, manages kernels/notebooks/clusters through the
    respective managers, and installs signal handlers for clean shutdown.
    """

    name = 'ipython-notebook'
    default_config_file_name='ipython_notebook_config.py'

    description = """
        The IPython HTML Notebook.

        This launches a Tornado based HTML Notebook Server that serves up an
        HTML5/Javascript Notebook client.
    """
    examples = _examples

    classes = IPythonConsoleApp.classes + [MappingKernelManager, NotebookManager,
        FileNotebookManager]
    flags = Dict(flags)
    aliases = Dict(aliases)

    # argv to pass through to kernels, built in parse_command_line
    kernel_argv = List(Unicode)

    max_msg_size = Integer(65536, config=True, help="""
        The max raw message size accepted from the browser
        over a WebSocket connection.
    """)

    def _log_level_default(self):
        """Default to INFO-level logging for the server."""
        return logging.INFO

    def _log_format_default(self):
        """override default log format to include time"""
        return u"%(asctime)s.%(msecs).03d [%(name)s]%(highlevel)s %(message)s"

    # create requested profiles by default, if they don't exist:
    auto_create = Bool(True)

    # file to be opened in the notebook server
    file_to_run = Unicode('')

    # Network related information.

    ip = Unicode(LOCALHOST, config=True,
        help="The IP address the notebook server will listen on."
    )

    def _ip_changed(self, name, old, new):
        # '*' means listen on all interfaces; tornado expects '' for that.
        if new == u'*': self.ip = u''

    port = Integer(8888, config=True,
        help="The port the notebook server will listen on."
    )
    port_retries = Integer(50, config=True,
        help="The number of additional ports to try if the specified port is not available."
    )

    certfile = Unicode(u'', config=True,
        help="""The full path to an SSL/TLS certificate file."""
    )

    keyfile = Unicode(u'', config=True,
        help="""The full path to a private key file for usage with SSL/TLS."""
    )

    cookie_secret = Bytes(b'', config=True,
        help="""The random bytes used to secure cookies.
        By default this is a new random number every time you start the Notebook.
        Set it to a value in a config file to enable logins to persist across server sessions.

        Note: Cookie secrets should be kept private, do not share config files with
        cookie_secret stored in plaintext (you can read the value from a file).
        """
    )
    def _cookie_secret_default(self):
        """Generate a fresh random cookie secret for each server start."""
        return os.urandom(1024)

    password = Unicode(u'', config=True,
        help="""Hashed password to use for web authentication.

        To generate, type in a python/IPython shell:

        from IPython.lib import passwd; passwd()

        The string should be of the form type:salt:hashed-password.
        """
    )

    open_browser = Bool(True, config=True,
        help="""Whether to open in a browser after starting.
        The specific browser used is platform dependent and
        determined by the python standard library `webbrowser`
        module, unless it is overridden using the --browser
        (NotebookApp.browser) configuration option.
        """)

    browser = Unicode(u'', config=True,
        help="""Specify what command to use to invoke a web
        browser when opening the notebook. If not specified, the
        default browser will be determined by the `webbrowser`
        standard library module, which allows setting of the
        BROWSER environment variable to override it.
        """)

    read_only = Bool(False, config=True,
        help="Whether to prevent editing/execution of notebooks."
    )

    use_less = Bool(False, config=True,
        help="""Wether to use Browser Side less-css parsing
        instead of compiled css version in templates that allows
        it. This is mainly convenient when working on the less
        file to avoid a build step, or if user want to overwrite
        some of the less variables without having to recompile
        everything.

        You will need to install the less.js component in the static directory
        either in the source tree or in your profile folder.
        """)

    webapp_settings = Dict(config=True,
        help="Supply overrides for the tornado.web.Application that the "
             "IPython notebook uses.")

    enable_mathjax = Bool(True, config=True,
        help="""Whether to enable MathJax for typesetting math/TeX

        MathJax is the javascript library IPython uses to render math/LaTeX. It is
        very large, so you may want to disable it if you have a slow internet
        connection, or for offline use of the notebook.

        When disabled, equations etc. will appear as their untransformed TeX source.
        """
    )
    def _enable_mathjax_changed(self, name, old, new):
        """set mathjax url to empty if mathjax is disabled"""
        if not new:
            self.mathjax_url = u''

    base_project_url = Unicode('/', config=True,
        help='''The base URL for the notebook server.

        Leading and trailing slashes can be omitted,
        and will automatically be added.
        ''')
    def _base_project_url_changed(self, name, old, new):
        # Normalize to exactly one leading and one trailing slash.
        if not new.startswith('/'):
            self.base_project_url = '/'+new
        elif not new.endswith('/'):
            self.base_project_url = new+'/'

    base_kernel_url = Unicode('/', config=True,
        help='''The base URL for the kernel server

        Leading and trailing slashes can be omitted,
        and will automatically be added.
        ''')
    def _base_kernel_url_changed(self, name, old, new):
        # Normalize to exactly one leading and one trailing slash.
        if not new.startswith('/'):
            self.base_kernel_url = '/'+new
        elif not new.endswith('/'):
            self.base_kernel_url = new+'/'

    websocket_url = Unicode("", config=True,
        help="""The base URL for the websocket server,
        if it differs from the HTTP server (hint: it almost certainly doesn't).

        Should be in the form of an HTTP origin: ws[s]://hostname[:port]
        """
    )

    extra_static_paths = List(Unicode, config=True,
        help="""Extra paths to search for serving static files.

        This allows adding javascript/css to be available from the notebook server machine,
        or overriding individual files in the IPython"""
    )
    def _extra_static_paths_default(self):
        """Default extra static path: the profile's 'static' directory."""
        return [os.path.join(self.profile_dir.location, 'static')]

    @property
    def static_file_path(self):
        """return extra paths + the default location"""
        return self.extra_static_paths + [DEFAULT_STATIC_FILES_PATH]

    mathjax_url = Unicode("", config=True,
        help="""The url for MathJax.js."""
    )
    def _mathjax_url_default(self):
        """Prefer a local MathJax install; fall back to a CDN if not found."""
        if not self.enable_mathjax:
            return u''
        static_url_prefix = self.webapp_settings.get("static_url_prefix",
            url_path_join(self.base_project_url, "static")
        )
        try:
            mathjax = filefind(os.path.join('mathjax', 'MathJax.js'), self.static_file_path)
        except IOError:
            if self.certfile:
                # HTTPS: load from Rackspace CDN, because SSL certificate requires it
                base = u"https://c328740.ssl.cf1.rackcdn.com"
            else:
                base = u"http://cdn.mathjax.org"

            url = base + u"/mathjax/latest/MathJax.js"
            self.log.info("Using MathJax from CDN: %s", url)
            return url
        else:
            self.log.info("Using local MathJax from %s" % mathjax)
            return url_path_join(static_url_prefix, u"mathjax/MathJax.js")

    def _mathjax_url_changed(self, name, old, new):
        if new and not self.enable_mathjax:
            # enable_mathjax=False overrides mathjax_url
            self.mathjax_url = u''
        else:
            self.log.info("Using MathJax: %s", new)

    notebook_manager_class = DottedObjectName('IPython.html.services.notebooks.filenbmanager.FileNotebookManager',
        config=True,
        help='The notebook manager class to use.')

    trust_xheaders = Bool(False, config=True,
        help=("Whether to trust or not X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-For headers"
              "sent by the upstream reverse proxy. Neccesary if the proxy handles SSL")
    )

    def parse_command_line(self, argv=None):
        """Parse argv, then split off kernel args and an optional file/dir arg.

        An extra positional argument is interpreted as either the notebook
        directory, or a notebook file to open (whose directory becomes the
        notebook dir).
        """
        super(NotebookApp, self).parse_command_line(argv)
        if argv is None:
            argv = sys.argv[1:]

        # Scrub frontend-specific flags
        self.kernel_argv = swallow_argv(argv, notebook_aliases, notebook_flags)
        # Kernel should inherit default config file from frontend
        self.kernel_argv.append("--IPKernelApp.parent_appname='%s'" % self.name)

        if self.extra_args:
            f = os.path.abspath(self.extra_args[0])
            if os.path.isdir(f):
                nbdir = f
            else:
                self.file_to_run = f
                nbdir = os.path.dirname(f)
            self.config.NotebookManager.notebook_dir = nbdir

    def init_configurables(self):
        """Instantiate the kernel, notebook, and cluster managers."""
        # force Session default to be secure
        default_secure(self.config)
        self.kernel_manager = MappingKernelManager(
            parent=self, log=self.log, kernel_argv=self.kernel_argv,
            connection_dir = self.profile_dir.security_dir,
        )
        # notebook manager class is configurable (notebook_manager_class trait)
        kls = import_item(self.notebook_manager_class)
        self.notebook_manager = kls(parent=self, log=self.log)
        self.notebook_manager.load_notebook_names()
        self.cluster_manager = ClusterManager(parent=self, log=self.log)
        self.cluster_manager.update_profiles()

    def init_logging(self):
        # This prevents double log messages because tornado uses a root logger
        # that self.log is a child of. The logging module dispatches log
        # messages to a log and all of its ancestors until propagate is set to
        # False.
        self.log.propagate = False

        # hook up tornado 3's loggers to our app handlers
        for name in ('access', 'application', 'general'):
            logging.getLogger('tornado.%s' % name).handlers = self.log.handlers

    def init_webapp(self):
        """initialize tornado webapp and httpserver"""
        self.web_app = NotebookWebApplication(
            self, self.kernel_manager, self.notebook_manager,
            self.cluster_manager, self.log,
            self.base_project_url, self.webapp_settings
        )
        if self.certfile:
            ssl_options = dict(certfile=self.certfile)
            if self.keyfile:
                ssl_options['keyfile'] = self.keyfile
        else:
            ssl_options = None
        self.web_app.password = self.password
        self.http_server = httpserver.HTTPServer(self.web_app, ssl_options=ssl_options,
                                                 xheaders=self.trust_xheaders)
        if not self.ip:
            # empty ip means listening on all interfaces (see _ip_changed)
            warning = "WARNING: The notebook server is listening on all IP addresses"
            if ssl_options is None:
                self.log.critical(warning + " and not using encryption. This "
                    "is not recommended.")
            if not self.password and not self.read_only:
                self.log.critical(warning + " and not using authentication. "
                    "This is highly insecure and not recommended.")
        # try the requested port first, then nearby ports (see random_ports)
        success = None
        for port in random_ports(self.port, self.port_retries+1):
            try:
                self.http_server.listen(port, self.ip)
            except socket.error as e:
                # XXX: remove the e.errno == -9 block when we require
                # tornado >= 3.0
                if e.errno == -9 and tornado.version_info[0] < 3:
                    # The flags passed to socket.getaddrinfo from
                    # tornado.netutils.bind_sockets can cause "gaierror:
                    # [Errno -9] Address family for hostname not supported"
                    # when the interface is not associated, for example.
                    # Changing the flags to exclude socket.AI_ADDRCONFIG does
                    # not cause this error, but the only way to do this is to
                    # monkeypatch socket to remove the AI_ADDRCONFIG attribute
                    saved_AI_ADDRCONFIG = socket.AI_ADDRCONFIG
                    self.log.warn('Monkeypatching socket to fix tornado bug')
                    del(socket.AI_ADDRCONFIG)
                    try:
                        # retry the tornado call without AI_ADDRCONFIG flags
                        self.http_server.listen(port, self.ip)
                    except socket.error as e2:
                        e = e2
                    else:
                        self.port = port
                        success = True
                        break
                    # restore the monkeypatch
                    socket.AI_ADDRCONFIG = saved_AI_ADDRCONFIG
                if e.errno != errno.EADDRINUSE:
                    raise
                self.log.info('The port %i is already in use, trying another random port.' % port)
            else:
                self.port = port
                success = True
                break
        if not success:
            self.log.critical('ERROR: the notebook server could not be started because '
                              'no available port could be found.')
            self.exit(1)

    def init_signal(self):
        """Register SIGINT/SIGTERM (and SIGUSR1/SIGINFO where available)."""
        if not sys.platform.startswith('win'):
            signal.signal(signal.SIGINT, self._handle_sigint)
        signal.signal(signal.SIGTERM, self._signal_stop)
        if hasattr(signal, 'SIGUSR1'):
            # Windows doesn't support SIGUSR1
            signal.signal(signal.SIGUSR1, self._signal_info)
        if hasattr(signal, 'SIGINFO'):
            # only on BSD-based systems
            signal.signal(signal.SIGINFO, self._signal_info)

    def _handle_sigint(self, sig, frame):
        """SIGINT handler spawns confirmation dialog"""
        # register more forceful signal handler for ^C^C case
        signal.signal(signal.SIGINT, self._signal_stop)
        # request confirmation dialog in bg thread, to avoid
        # blocking the App
        thread = threading.Thread(target=self._confirm_exit)
        thread.daemon = True
        thread.start()

    def _restore_sigint_handler(self):
        """callback for restoring original SIGINT handler"""
        signal.signal(signal.SIGINT, self._handle_sigint)

    def _confirm_exit(self):
        """confirm shutdown on ^C

        A second ^C, or answering 'y' within 5s will cause shutdown,
        otherwise original SIGINT handler will be restored.

        This doesn't work on Windows.
        """
        # FIXME: remove this delay when pyzmq dependency is >= 2.1.11
        time.sleep(0.1)
        info = self.log.info
        info('interrupted')
        print self.notebook_info()
        sys.stdout.write("Shutdown this notebook server (y/[n])? ")
        sys.stdout.flush()
        # wait up to 5s for an answer on stdin
        r,w,x = select.select([sys.stdin], [], [], 5)
        if r:
            line = sys.stdin.readline()
            if line.lower().startswith('y'):
                self.log.critical("Shutdown confirmed")
                ioloop.IOLoop.instance().stop()
                return
        else:
            print "No answer for 5s:",
            print "resuming operation..."
        # no answer, or answer is no:
        # set it back to original SIGINT handler
        # use IOLoop.add_callback because signal.signal must be called
        # from main thread
        ioloop.IOLoop.instance().add_callback(self._restore_sigint_handler)

    def _signal_stop(self, sig, frame):
        """Stop the IOLoop immediately (SIGTERM, or second ^C)."""
        self.log.critical("received signal %s, stopping", sig)
        ioloop.IOLoop.instance().stop()

    def _signal_info(self, sig, frame):
        """Print server status in response to SIGUSR1/SIGINFO."""
        print self.notebook_info()

    def init_components(self):
        """Check the components submodule, and warn if it's unclean"""
        status = submodule.check_submodule_status()
        if status == 'missing':
            self.log.warn("components submodule missing, running `git submodule update`")
            submodule.update_submodules(submodule.ipython_parent())
        elif status == 'unclean':
            self.log.warn("components submodule unclean, you may see 404s on static/components")
            self.log.warn("run `setup.py submodule` or `git submodule update` to update")


    @catch_config_error
    def initialize(self, argv=None):
        """Run all init_* steps in order; call before start()."""
        # logging first, so subsequent init steps log through our handlers
        self.init_logging()
        super(NotebookApp, self).initialize(argv)
        self.init_configurables()
        self.init_components()
        self.init_webapp()
        self.init_signal()

    def cleanup_kernels(self):
        """Shutdown all kernels.

        The kernels will shutdown themselves when this process no longer exists,
        but explicit shutdown allows the KernelManagers to cleanup the connection files.
        """
        self.log.info('Shutting down kernels')
        self.kernel_manager.shutdown_all()

    def notebook_info(self):
        "Return the current working directory and the server url information"
        mgr_info = self.notebook_manager.info_string() + "\n"
        return mgr_info +"The IPython Notebook is running at: %s" % self._url

    def start(self):
        """ Start the IPython Notebook server app, after initialization

        This method takes no arguments so all configuration and initialization
        must be done prior to calling this method."""
        ip = self.ip if self.ip else '[all ip addresses on your system]'
        proto = 'https' if self.certfile else 'http'
        info = self.log.info
        self._url = "%s://%s:%i%s" % (proto, ip, self.port,
                                      self.base_project_url)
        for line in self.notebook_info().split("\n"):
            info(line)
        info("Use Control-C to stop this server and shut down all kernels.")

        if self.open_browser or self.file_to_run:
            ip = self.ip or LOCALHOST
            try:
                browser = webbrowser.get(self.browser or None)
            except webbrowser.Error as e:
                self.log.warn('No web browser found: %s.' % e)
                browser = None

            if self.file_to_run:
                # map the requested filename to its server URL via the manager
                name, _ = os.path.splitext(os.path.basename(self.file_to_run))
                url = self.notebook_manager.rev_mapping.get(name, '')
            else:
                url = ''
            if browser:
                # open the browser from a thread so it doesn't block the IOLoop
                b = lambda : browser.open("%s://%s:%i%s%s" % (proto, ip,
                    self.port, self.base_project_url, url), new=2)
                threading.Thread(target=b).start()
        try:
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            info("Interrupted...")
        finally:
            self.cleanup_kernels()
742 742
743 743
744 744 #-----------------------------------------------------------------------------
745 745 # Main entry point
746 746 #-----------------------------------------------------------------------------
747 747
# Use the Application.launch_new_instance classmethod rather than a
# hand-rolled instance()/initialize()/start() sequence, so launching stays
# consistent with the other IPython applications.
launch_new_instance = NotebookApp.launch_new_instance
752 749
@@ -1,212 +1,207 b''
1 1 #!/usr/bin/env python
2 2 """NBConvert is a utility for conversion of IPYNB files.
3 3
4 4 Commandline interface for the NBConvert conversion utility. Read the
5 5 readme.rst for usage information
6 6 """
7 7 #-----------------------------------------------------------------------------
8 8 #Copyright (c) 2013, the IPython Development Team.
9 9 #
10 10 #Distributed under the terms of the Modified BSD License.
11 11 #
12 12 #The full license is in the file COPYING.txt, distributed with this software.
13 13 #-----------------------------------------------------------------------------
14 14
15 15 #-----------------------------------------------------------------------------
16 16 #Imports
17 17 #-----------------------------------------------------------------------------
18 18
19 19 #Stdlib imports
20 20 from __future__ import print_function
21 21 import sys
22 22 import io
23 23 import os
24 24
25 25 #From IPython
26 26 from IPython.config.application import Application
27 27 from IPython.utils.traitlets import Bool
28 28
29 29 from .exporters.export import export_by_name
30 30 from .exporters.exporter import Exporter
31 31 from .transformers import extractfigure
32 32 from .utils.config import GlobalConfigurable
33 33
34 34 #-----------------------------------------------------------------------------
35 35 #Globals and constants
36 36 #-----------------------------------------------------------------------------
37 37
38 38 #'Keys in resources' user prompt.
39 39 KEYS_PROMPT_HEAD = "====================== Keys in Resources =================================="
40 40 KEYS_PROMPT_BODY = """
41 41 ===========================================================================
42 42 You are responsible for writting these files into the appropriate
43 43 directorie(s) if need be. If you do not want to see this message, enable
44 44 the 'write' (boolean) flag of the converter.
45 45 ===========================================================================
46 46 """
47 47
48 48 #-----------------------------------------------------------------------------
49 49 #Classes and functions
50 50 #-----------------------------------------------------------------------------
51 51
52 52 class NbConvertApp(Application):
53 53 """Application used to convert to and from notebook file type (*.ipynb)"""
54 54
55 55 stdout = Bool(
56 56 False, config=True,
57 57 help="""Whether to print the converted IPYNB file to stdout
58 58 use full do diff files without actually writing a new file"""
59 59 )
60 60
61 61 write = Bool(
62 62 True, config=True,
63 63 help="""Should the converted notebook file be written to disk
64 64 along with potential extracted resources."""
65 65 )
66 66
67 67 aliases = {
68 68 'stdout':'NbConvertApp.stdout',
69 69 'write':'NbConvertApp.write',
70 70 }
71 71
72 72 flags = {}
73 73
74 74 flags['stdout'] = (
75 75 {'NbConvertApp' : {'stdout' : True}},
76 76 """Print converted file to stdout, equivalent to --stdout=True
77 77 """
78 78 )
79 79
80 80 flags['no-write'] = (
81 81 {'NbConvertApp' : {'write' : True}},
82 82 """Do not write to disk, equivalent to --write=False
83 83 """
84 84 )
85 85
86 86
87 87 def __init__(self, **kwargs):
88 88 """Public constructor"""
89 89
90 90 #Call base class
91 91 super(NbConvertApp, self).__init__(**kwargs)
92 92
93 93 #Register class here to have help with help all
94 94 self.classes.insert(0, Exporter)
95 95 self.classes.insert(0, GlobalConfigurable)
96 96
97 97
98 98 def start(self, argv=None):
99 99 """Entrypoint of NbConvert application.
100 100
101 101 Parameters
102 102 ----------
103 103 argv : list
104 104 Commandline arguments
105 105 """
106 106
107 107 #Parse the commandline options.
108 108 self.parse_command_line(argv)
109 109
110 110 #Call base
111 111 super(NbConvertApp, self).start()
112 112
113 113 #The last arguments in list will be used by nbconvert
114 114 if len(self.extra_args) is not 3:
115 115 print( "Wrong number of arguments, use --help flag for usage", file=sys.stderr)
116 116 sys.exit(-1)
117 117 export_type = (self.extra_args)[1]
118 118 ipynb_file = (self.extra_args)[2]
119 119
120 120 #Export
121 121 return_value = export_by_name(export_type, ipynb_file)
122 122 if return_value is None:
123 123 print("Error: '%s' template not found." % export_type)
124 124 return
125 125 else:
126 126 (output, resources, exporter) = return_value
127 127
128 128 #TODO: Allow user to set output directory and file.
129 129 destination_filename = None
130 130 destination_directory = None
131 131 if self.write:
132 132
133 133 #Get the file name without the '.ipynb' (6 chars) extension and then
134 134 #remove any addition periods and spaces. The resulting name will
135 135 #be used to create the directory that the files will be exported
136 136 #into.
137 137 out_root = ipynb_file[:-6].replace('.', '_').replace(' ', '_')
138 138 destination_filename = os.path.join(out_root+'.'+exporter.file_extension)
139 139
140 140 destination_directory = out_root+'_files'
141 141 if not os.path.exists(destination_directory):
142 142 os.mkdir(destination_directory)
143 143
144 144 #Write the results
145 145 if self.stdout or not (destination_filename is None and destination_directory is None):
146 146 self._write_results(output, resources, destination_filename, destination_directory)
147 147
148 148
149 149 def _write_results(self, output, resources, destination_filename=None, destination_directory=None):
150 150 """Output the conversion results to the console and/or filesystem
151 151
152 152 Parameters
153 153 ----------
154 154 output : str
155 155 Output of conversion
156 156 resources : dictionary
157 157 Additional input/output used by the transformers. For
158 158 example, the ExtractFigure transformer outputs the
159 159 figures it extracts into this dictionary. This method
160 160 relies on the figures being in this dictionary when
161 161 attempting to write the figures to the file system.
162 162 destination_filename : str, Optional
163 163 Filename to write output into. If None, output is not
164 164 written to a file.
165 165 destination_directory : str, Optional
166 166 Directory to write notebook data (i.e. figures) to. If
167 167 None, figures are not written to the file system.
168 168 """
169 169
170 170 if self.stdout:
171 171 print(output.encode('utf-8'))
172 172
173 173 #Write file output from conversion.
174 174 if not destination_filename is None:
175 175 with io.open(destination_filename, 'w') as f:
176 176 f.write(output)
177 177
178 178 #Get the key names used by the extract figure transformer
179 179 figures_key = extractfigure.FIGURES_KEY
180 180 binary_key = extractfigure.BINARY_KEY
181 181 text_key = extractfigure.TEXT_KEY
182 182
183 183 #Output any associate figures into the same "root" directory.
184 184 binkeys = resources.get(figures_key, {}).get(binary_key,{}).keys()
185 185 textkeys = resources.get(figures_key, {}).get(text_key,{}).keys()
186 186 if binkeys or textkeys :
187 187 if not destination_directory is None:
188 188 for key in binkeys:
189 189 with io.open(os.path.join(destination_directory, key), 'wb') as f:
190 190 f.write(resources[figures_key][binary_key][key])
191 191 for key in textkeys:
192 192 with io.open(os.path.join(destination_directory, key), 'w') as f:
193 193 f.write(resources[figures_key][text_key][key])
194 194
195 195 #Figures that weren't exported which will need to be created by the
196 196 #user. Tell the user what figures these are.
197 197 if self.stdout:
198 198 print(KEYS_PROMPT_HEAD, file=sys.stderr)
199 199 print(resources[figures_key].keys(), file=sys.stderr)
200 200 print(KEYS_PROMPT_BODY , file=sys.stderr)
201 201
202 202 #-----------------------------------------------------------------------------
203 203 # Main entry point
204 204 #-----------------------------------------------------------------------------
205 205
206 def launch_new_instance():
207 """Application entry point"""
208
209 app = NbConvertApp.instance()
210 app.description = __doc__
211 app.start(argv=sys.argv)
206 launch_new_instance = NbConvertApp.launch_new_instance
212 207
@@ -1,620 +1,615 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 """
4 4 The ipcluster application.
5 5
6 6 Authors:
7 7
8 8 * Brian Granger
9 9 * MinRK
10 10
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Copyright (C) 2008-2011 The IPython Development Team
15 15 #
16 16 # Distributed under the terms of the BSD License. The full license is in
17 17 # the file COPYING, distributed as part of this software.
18 18 #-----------------------------------------------------------------------------
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Imports
22 22 #-----------------------------------------------------------------------------
23 23
24 24 import errno
25 25 import logging
26 26 import os
27 27 import re
28 28 import signal
29 29
30 30 from subprocess import check_call, CalledProcessError, PIPE
31 31 import zmq
32 32 from zmq.eventloop import ioloop
33 33
34 34 from IPython.config.application import Application, boolean_flag, catch_config_error
35 35 from IPython.config.loader import Config
36 36 from IPython.core.application import BaseIPythonApplication
37 37 from IPython.core.profiledir import ProfileDir
38 38 from IPython.utils.daemonize import daemonize
39 39 from IPython.utils.importstring import import_item
40 40 from IPython.utils.sysinfo import num_cpus
41 41 from IPython.utils.traitlets import (Integer, Unicode, Bool, CFloat, Dict, List, Any,
42 42 DottedObjectName)
43 43
44 44 from IPython.parallel.apps.baseapp import (
45 45 BaseParallelApplication,
46 46 PIDFileError,
47 47 base_flags, base_aliases
48 48 )
49 49
50 50
51 51 #-----------------------------------------------------------------------------
52 52 # Module level variables
53 53 #-----------------------------------------------------------------------------
54 54
55 55
56 56 default_config_file_name = u'ipcluster_config.py'
57 57
58 58
59 59 _description = """Start an IPython cluster for parallel computing.
60 60
61 61 An IPython cluster consists of 1 controller and 1 or more engines.
62 62 This command automates the startup of these processes using a wide range of
63 63 startup methods (SSH, local processes, PBS, mpiexec, SGE, LSF, HTCondor,
64 64 Windows HPC Server 2008). To start a cluster with 4 engines on your
65 65 local host simply do 'ipcluster start --n=4'. For more complex usage
66 66 you will typically do 'ipython profile create mycluster --parallel', then edit
67 67 configuration files, followed by 'ipcluster start --profile=mycluster --n=4'.
68 68 """
69 69
70 70 _main_examples = """
71 71 ipcluster start --n=4 # start a 4 node cluster on localhost
72 72 ipcluster start -h # show the help string for the start subcmd
73 73
74 74 ipcluster stop -h # show the help string for the stop subcmd
75 75 ipcluster engines -h # show the help string for the engines subcmd
76 76 """
77 77
78 78 _start_examples = """
79 79 ipython profile create mycluster --parallel # create mycluster profile
80 80 ipcluster start --profile=mycluster --n=4 # start mycluster with 4 nodes
81 81 """
82 82
83 83 _stop_examples = """
84 84 ipcluster stop --profile=mycluster # stop a running cluster by profile name
85 85 """
86 86
87 87 _engines_examples = """
88 88 ipcluster engines --profile=mycluster --n=4 # start 4 engines only
89 89 """
90 90
91 91
92 92 # Exit codes for ipcluster
93 93
94 94 # This will be the exit code if the ipcluster appears to be running because
95 95 # a .pid file exists
96 96 ALREADY_STARTED = 10
97 97
98 98
99 99 # This will be the exit code if ipcluster stop is run, but there is not .pid
100 100 # file to be found.
101 101 ALREADY_STOPPED = 11
102 102
103 103 # This will be the exit code if ipcluster engines is run, but there is not .pid
104 104 # file to be found.
105 105 NO_CLUSTER = 12
106 106
107 107
108 108 #-----------------------------------------------------------------------------
109 109 # Utilities
110 110 #-----------------------------------------------------------------------------
111 111
112 112 def find_launcher_class(clsname, kind):
113 113 """Return a launcher for a given clsname and kind.
114 114
115 115 Parameters
116 116 ==========
117 117 clsname : str
118 118 The full name of the launcher class, either with or without the
119 119 module path, or an abbreviation (MPI, SSH, SGE, PBS, LSF, HTCondor
120 120 WindowsHPC).
121 121 kind : str
122 122 Either 'EngineSet' or 'Controller'.
123 123 """
124 124 if '.' not in clsname:
125 125 # not a module, presume it's the raw name in apps.launcher
126 126 if kind and kind not in clsname:
127 127 # doesn't match necessary full class name, assume it's
128 128 # just 'PBS' or 'MPI' etc prefix:
129 129 clsname = clsname + kind + 'Launcher'
130 130 clsname = 'IPython.parallel.apps.launcher.'+clsname
131 131 klass = import_item(clsname)
132 132 return klass
133 133
134 134 #-----------------------------------------------------------------------------
135 135 # Main application
136 136 #-----------------------------------------------------------------------------
137 137
138 138 start_help = """Start an IPython cluster for parallel computing
139 139
140 140 Start an ipython cluster by its profile name or cluster
141 141 directory. Cluster directories contain configuration, log and
142 142 security related files and are named using the convention
143 143 'profile_<name>' and should be creating using the 'start'
144 144 subcommand of 'ipcluster'. If your cluster directory is in
145 145 the cwd or the ipython directory, you can simply refer to it
146 146 using its profile name, 'ipcluster start --n=4 --profile=<profile>`,
147 147 otherwise use the 'profile-dir' option.
148 148 """
149 149 stop_help = """Stop a running IPython cluster
150 150
151 151 Stop a running ipython cluster by its profile name or cluster
152 152 directory. Cluster directories are named using the convention
153 153 'profile_<name>'. If your cluster directory is in
154 154 the cwd or the ipython directory, you can simply refer to it
155 155 using its profile name, 'ipcluster stop --profile=<profile>`, otherwise
156 156 use the '--profile-dir' option.
157 157 """
158 158 engines_help = """Start engines connected to an existing IPython cluster
159 159
160 160 Start one or more engines to connect to an existing Cluster
161 161 by profile name or cluster directory.
162 162 Cluster directories contain configuration, log and
163 163 security related files and are named using the convention
164 164 'profile_<name>' and should be creating using the 'start'
165 165 subcommand of 'ipcluster'. If your cluster directory is in
166 166 the cwd or the ipython directory, you can simply refer to it
167 167 using its profile name, 'ipcluster engines --n=4 --profile=<profile>`,
168 168 otherwise use the 'profile-dir' option.
169 169 """
170 170 stop_aliases = dict(
171 171 signal='IPClusterStop.signal',
172 172 )
173 173 stop_aliases.update(base_aliases)
174 174
175 175 class IPClusterStop(BaseParallelApplication):
176 176 name = u'ipcluster'
177 177 description = stop_help
178 178 examples = _stop_examples
179 179 config_file_name = Unicode(default_config_file_name)
180 180
181 181 signal = Integer(signal.SIGINT, config=True,
182 182 help="signal to use for stopping processes.")
183 183
184 184 aliases = Dict(stop_aliases)
185 185
186 186 def start(self):
187 187 """Start the app for the stop subcommand."""
188 188 try:
189 189 pid = self.get_pid_from_file()
190 190 except PIDFileError:
191 191 self.log.critical(
192 192 'Could not read pid file, cluster is probably not running.'
193 193 )
194 194 # Here I exit with a unusual exit status that other processes
195 195 # can watch for to learn how I existed.
196 196 self.remove_pid_file()
197 197 self.exit(ALREADY_STOPPED)
198 198
199 199 if not self.check_pid(pid):
200 200 self.log.critical(
201 201 'Cluster [pid=%r] is not running.' % pid
202 202 )
203 203 self.remove_pid_file()
204 204 # Here I exit with a unusual exit status that other processes
205 205 # can watch for to learn how I existed.
206 206 self.exit(ALREADY_STOPPED)
207 207
208 208 elif os.name=='posix':
209 209 sig = self.signal
210 210 self.log.info(
211 211 "Stopping cluster [pid=%r] with [signal=%r]" % (pid, sig)
212 212 )
213 213 try:
214 214 os.kill(pid, sig)
215 215 except OSError:
216 216 self.log.error("Stopping cluster failed, assuming already dead.",
217 217 exc_info=True)
218 218 self.remove_pid_file()
219 219 elif os.name=='nt':
220 220 try:
221 221 # kill the whole tree
222 222 p = check_call(['taskkill', '-pid', str(pid), '-t', '-f'], stdout=PIPE,stderr=PIPE)
223 223 except (CalledProcessError, OSError):
224 224 self.log.error("Stopping cluster failed, assuming already dead.",
225 225 exc_info=True)
226 226 self.remove_pid_file()
227 227
228 228 engine_aliases = {}
229 229 engine_aliases.update(base_aliases)
230 230 engine_aliases.update(dict(
231 231 n='IPClusterEngines.n',
232 232 engines = 'IPClusterEngines.engine_launcher_class',
233 233 daemonize = 'IPClusterEngines.daemonize',
234 234 ))
235 235 engine_flags = {}
236 236 engine_flags.update(base_flags)
237 237
238 238 engine_flags.update(dict(
239 239 daemonize=(
240 240 {'IPClusterEngines' : {'daemonize' : True}},
241 241 """run the cluster into the background (not available on Windows)""",
242 242 )
243 243 ))
244 244 class IPClusterEngines(BaseParallelApplication):
245 245
246 246 name = u'ipcluster'
247 247 description = engines_help
248 248 examples = _engines_examples
249 249 usage = None
250 250 config_file_name = Unicode(default_config_file_name)
251 251 default_log_level = logging.INFO
252 252 classes = List()
253 253 def _classes_default(self):
254 254 from IPython.parallel.apps import launcher
255 255 launchers = launcher.all_launchers
256 256 eslaunchers = [ l for l in launchers if 'EngineSet' in l.__name__]
257 257 return [ProfileDir]+eslaunchers
258 258
259 259 n = Integer(num_cpus(), config=True,
260 260 help="""The number of engines to start. The default is to use one for each
261 261 CPU on your machine""")
262 262
263 263 engine_launcher = Any(config=True, help="Deprecated, use engine_launcher_class")
264 264 def _engine_launcher_changed(self, name, old, new):
265 265 if isinstance(new, basestring):
266 266 self.log.warn("WARNING: %s.engine_launcher is deprecated as of 0.12,"
267 267 " use engine_launcher_class" % self.__class__.__name__)
268 268 self.engine_launcher_class = new
269 269 engine_launcher_class = DottedObjectName('LocalEngineSetLauncher',
270 270 config=True,
271 271 help="""The class for launching a set of Engines. Change this value
272 272 to use various batch systems to launch your engines, such as PBS,SGE,MPI,etc.
273 273 Each launcher class has its own set of configuration options, for making sure
274 274 it will work in your environment.
275 275
276 276 You can also write your own launcher, and specify it's absolute import path,
277 277 as in 'mymodule.launcher.FTLEnginesLauncher`.
278 278
279 279 IPython's bundled examples include:
280 280
281 281 Local : start engines locally as subprocesses [default]
282 282 MPI : use mpiexec to launch engines in an MPI environment
283 283 PBS : use PBS (qsub) to submit engines to a batch queue
284 284 SGE : use SGE (qsub) to submit engines to a batch queue
285 285 LSF : use LSF (bsub) to submit engines to a batch queue
286 286 SSH : use SSH to start the controller
287 287 Note that SSH does *not* move the connection files
288 288 around, so you will likely have to do this manually
289 289 unless the machines are on a shared file system.
290 290 HTCondor : use HTCondor to submit engines to a batch queue
291 291 WindowsHPC : use Windows HPC
292 292
293 293 If you are using one of IPython's builtin launchers, you can specify just the
294 294 prefix, e.g:
295 295
296 296 c.IPClusterEngines.engine_launcher_class = 'SSH'
297 297
298 298 or:
299 299
300 300 ipcluster start --engines=MPI
301 301
302 302 """
303 303 )
304 304 daemonize = Bool(False, config=True,
305 305 help="""Daemonize the ipcluster program. This implies --log-to-file.
306 306 Not available on Windows.
307 307 """)
308 308
309 309 def _daemonize_changed(self, name, old, new):
310 310 if new:
311 311 self.log_to_file = True
312 312
313 313 early_shutdown = Integer(30, config=True, help="The timeout (in seconds)")
314 314 _stopping = False
315 315
316 316 aliases = Dict(engine_aliases)
317 317 flags = Dict(engine_flags)
318 318
319 319 @catch_config_error
320 320 def initialize(self, argv=None):
321 321 super(IPClusterEngines, self).initialize(argv)
322 322 self.init_signal()
323 323 self.init_launchers()
324 324
325 325 def init_launchers(self):
326 326 self.engine_launcher = self.build_launcher(self.engine_launcher_class, 'EngineSet')
327 327
328 328 def init_signal(self):
329 329 # Setup signals
330 330 signal.signal(signal.SIGINT, self.sigint_handler)
331 331
332 332 def build_launcher(self, clsname, kind=None):
333 333 """import and instantiate a Launcher based on importstring"""
334 334 try:
335 335 klass = find_launcher_class(clsname, kind)
336 336 except (ImportError, KeyError):
337 337 self.log.fatal("Could not import launcher class: %r"%clsname)
338 338 self.exit(1)
339 339
340 340 launcher = klass(
341 341 work_dir=u'.', parent=self, log=self.log,
342 342 profile_dir=self.profile_dir.location, cluster_id=self.cluster_id,
343 343 )
344 344 return launcher
345 345
346 346 def engines_started_ok(self):
347 347 self.log.info("Engines appear to have started successfully")
348 348 self.early_shutdown = 0
349 349
350 350 def start_engines(self):
351 351 # Some EngineSetLaunchers ignore `n` and use their own engine count, such as SSH:
352 352 n = getattr(self.engine_launcher, 'engine_count', self.n)
353 353 self.log.info("Starting %s Engines with %s", n, self.engine_launcher_class)
354 354 self.engine_launcher.start(self.n)
355 355 self.engine_launcher.on_stop(self.engines_stopped_early)
356 356 if self.early_shutdown:
357 357 ioloop.DelayedCallback(self.engines_started_ok, self.early_shutdown*1000, self.loop).start()
358 358
359 359 def engines_stopped_early(self, r):
360 360 if self.early_shutdown and not self._stopping:
361 361 self.log.error("""
362 362 Engines shutdown early, they probably failed to connect.
363 363
364 364 Check the engine log files for output.
365 365
366 366 If your controller and engines are not on the same machine, you probably
367 367 have to instruct the controller to listen on an interface other than localhost.
368 368
369 369 You can set this by adding "--ip='*'" to your ControllerLauncher.controller_args.
370 370
371 371 Be sure to read our security docs before instructing your controller to listen on
372 372 a public interface.
373 373 """)
374 374 self.stop_launchers()
375 375
376 376 return self.engines_stopped(r)
377 377
378 378 def engines_stopped(self, r):
379 379 return self.loop.stop()
380 380
381 381 def stop_engines(self):
382 382 if self.engine_launcher.running:
383 383 self.log.info("Stopping Engines...")
384 384 d = self.engine_launcher.stop()
385 385 return d
386 386 else:
387 387 return None
388 388
389 389 def stop_launchers(self, r=None):
390 390 if not self._stopping:
391 391 self._stopping = True
392 392 self.log.error("IPython cluster: stopping")
393 393 self.stop_engines()
394 394 # Wait a few seconds to let things shut down.
395 395 dc = ioloop.DelayedCallback(self.loop.stop, 3000, self.loop)
396 396 dc.start()
397 397
398 398 def sigint_handler(self, signum, frame):
399 399 self.log.debug("SIGINT received, stopping launchers...")
400 400 self.stop_launchers()
401 401
402 402 def start_logging(self):
403 403 # Remove old log files of the controller and engine
404 404 if self.clean_logs:
405 405 log_dir = self.profile_dir.log_dir
406 406 for f in os.listdir(log_dir):
407 407 if re.match(r'ip(engine|controller)z-\d+\.(log|err|out)',f):
408 408 os.remove(os.path.join(log_dir, f))
409 409 # This will remove old log files for ipcluster itself
410 410 # super(IPBaseParallelApplication, self).start_logging()
411 411
412 412 def start(self):
413 413 """Start the app for the engines subcommand."""
414 414 self.log.info("IPython cluster: started")
415 415 # First see if the cluster is already running
416 416
417 417 # Now log and daemonize
418 418 self.log.info(
419 419 'Starting engines with [daemon=%r]' % self.daemonize
420 420 )
421 421 # TODO: Get daemonize working on Windows or as a Windows Server.
422 422 if self.daemonize:
423 423 if os.name=='posix':
424 424 daemonize()
425 425
426 426 dc = ioloop.DelayedCallback(self.start_engines, 0, self.loop)
427 427 dc.start()
428 428 # Now write the new pid file AFTER our new forked pid is active.
429 429 # self.write_pid_file()
430 430 try:
431 431 self.loop.start()
432 432 except KeyboardInterrupt:
433 433 pass
434 434 except zmq.ZMQError as e:
435 435 if e.errno == errno.EINTR:
436 436 pass
437 437 else:
438 438 raise
439 439
440 440 start_aliases = {}
441 441 start_aliases.update(engine_aliases)
442 442 start_aliases.update(dict(
443 443 delay='IPClusterStart.delay',
444 444 controller = 'IPClusterStart.controller_launcher_class',
445 445 ))
446 446 start_aliases['clean-logs'] = 'IPClusterStart.clean_logs'
447 447
448 448 class IPClusterStart(IPClusterEngines):
449 449
450 450 name = u'ipcluster'
451 451 description = start_help
452 452 examples = _start_examples
453 453 default_log_level = logging.INFO
454 454 auto_create = Bool(True, config=True,
455 455 help="whether to create the profile_dir if it doesn't exist")
456 456 classes = List()
457 457 def _classes_default(self,):
458 458 from IPython.parallel.apps import launcher
459 459 return [ProfileDir] + [IPClusterEngines] + launcher.all_launchers
460 460
461 461 clean_logs = Bool(True, config=True,
462 462 help="whether to cleanup old logs before starting")
463 463
464 464 delay = CFloat(1., config=True,
465 465 help="delay (in s) between starting the controller and the engines")
466 466
467 467 controller_launcher = Any(config=True, help="Deprecated, use controller_launcher_class")
468 468 def _controller_launcher_changed(self, name, old, new):
469 469 if isinstance(new, basestring):
470 470 # old 0.11-style config
471 471 self.log.warn("WARNING: %s.controller_launcher is deprecated as of 0.12,"
472 472 " use controller_launcher_class" % self.__class__.__name__)
473 473 self.controller_launcher_class = new
474 474 controller_launcher_class = DottedObjectName('LocalControllerLauncher',
475 475 config=True,
476 476 help="""The class for launching a Controller. Change this value if you want
477 477 your controller to also be launched by a batch system, such as PBS,SGE,MPI,etc.
478 478
479 479 Each launcher class has its own set of configuration options, for making sure
480 480 it will work in your environment.
481 481
482 482 Note that using a batch launcher for the controller *does not* put it
483 483 in the same batch job as the engines, so they will still start separately.
484 484
485 485 IPython's bundled examples include:
486 486
487 487 Local : start engines locally as subprocesses
488 488 MPI : use mpiexec to launch the controller in an MPI universe
489 489 PBS : use PBS (qsub) to submit the controller to a batch queue
490 490 SGE : use SGE (qsub) to submit the controller to a batch queue
491 491 LSF : use LSF (bsub) to submit the controller to a batch queue
492 492 HTCondor : use HTCondor to submit the controller to a batch queue
493 493 SSH : use SSH to start the controller
494 494 WindowsHPC : use Windows HPC
495 495
496 496 If you are using one of IPython's builtin launchers, you can specify just the
497 497 prefix, e.g:
498 498
499 499 c.IPClusterStart.controller_launcher_class = 'SSH'
500 500
501 501 or:
502 502
503 503 ipcluster start --controller=MPI
504 504
505 505 """
506 506 )
507 507 reset = Bool(False, config=True,
508 508 help="Whether to reset config files as part of '--create'."
509 509 )
510 510
511 511 # flags = Dict(flags)
512 512 aliases = Dict(start_aliases)
513 513
514 514 def init_launchers(self):
515 515 self.controller_launcher = self.build_launcher(self.controller_launcher_class, 'Controller')
516 516 self.engine_launcher = self.build_launcher(self.engine_launcher_class, 'EngineSet')
517 517
518 518 def engines_stopped(self, r):
519 519 """prevent parent.engines_stopped from stopping everything on engine shutdown"""
520 520 pass
521 521
522 522 def start_controller(self):
523 523 self.log.info("Starting Controller with %s", self.controller_launcher_class)
524 524 self.controller_launcher.on_stop(self.stop_launchers)
525 525 self.controller_launcher.start()
526 526
527 527 def stop_controller(self):
528 528 # self.log.info("In stop_controller")
529 529 if self.controller_launcher and self.controller_launcher.running:
530 530 return self.controller_launcher.stop()
531 531
532 532 def stop_launchers(self, r=None):
533 533 if not self._stopping:
534 534 self.stop_controller()
535 535 super(IPClusterStart, self).stop_launchers()
536 536
537 537 def start(self):
538 538 """Start the app for the start subcommand."""
539 539 # First see if the cluster is already running
540 540 try:
541 541 pid = self.get_pid_from_file()
542 542 except PIDFileError:
543 543 pass
544 544 else:
545 545 if self.check_pid(pid):
546 546 self.log.critical(
547 547 'Cluster is already running with [pid=%s]. '
548 548 'use "ipcluster stop" to stop the cluster.' % pid
549 549 )
550 550 # Here I exit with a unusual exit status that other processes
551 551 # can watch for to learn how I existed.
552 552 self.exit(ALREADY_STARTED)
553 553 else:
554 554 self.remove_pid_file()
555 555
556 556
557 557 # Now log and daemonize
558 558 self.log.info(
559 559 'Starting ipcluster with [daemon=%r]' % self.daemonize
560 560 )
561 561 # TODO: Get daemonize working on Windows or as a Windows Server.
562 562 if self.daemonize:
563 563 if os.name=='posix':
564 564 daemonize()
565 565
566 566 dc = ioloop.DelayedCallback(self.start_controller, 0, self.loop)
567 567 dc.start()
568 568 dc = ioloop.DelayedCallback(self.start_engines, 1000*self.delay, self.loop)
569 569 dc.start()
570 570 # Now write the new pid file AFTER our new forked pid is active.
571 571 self.write_pid_file()
572 572 try:
573 573 self.loop.start()
574 574 except KeyboardInterrupt:
575 575 pass
576 576 except zmq.ZMQError as e:
577 577 if e.errno == errno.EINTR:
578 578 pass
579 579 else:
580 580 raise
581 581 finally:
582 582 self.remove_pid_file()
583 583
584 584 base='IPython.parallel.apps.ipclusterapp.IPCluster'
585 585
586 586 class IPClusterApp(BaseIPythonApplication):
587 587 name = u'ipcluster'
588 588 description = _description
589 589 examples = _main_examples
590 590
591 591 subcommands = {
592 592 'start' : (base+'Start', start_help),
593 593 'stop' : (base+'Stop', stop_help),
594 594 'engines' : (base+'Engines', engines_help),
595 595 }
596 596
597 597 # no aliases or flags for parent App
598 598 aliases = Dict()
599 599 flags = Dict()
600 600
601 601 def start(self):
602 602 if self.subapp is None:
603 603 print "No subcommand specified. Must specify one of: %s"%(self.subcommands.keys())
604 604 print
605 605 self.print_description()
606 606 self.print_subcommands()
607 607 self.exit(1)
608 608 else:
609 609 return self.subapp.start()
610 610
611 def launch_new_instance():
612 """Create and run the IPython cluster."""
613 app = IPClusterApp.instance()
614 app.initialize()
615 app.start()
616
611 launch_new_instance = IPClusterApp.launch_new_instance
617 612
618 613 if __name__ == '__main__':
619 614 launch_new_instance()
620 615
@@ -1,554 +1,551 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 """
4 4 The IPython controller application.
5 5
6 6 Authors:
7 7
8 8 * Brian Granger
9 9 * MinRK
10 10
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Copyright (C) 2008-2011 The IPython Development Team
15 15 #
16 16 # Distributed under the terms of the BSD License. The full license is in
17 17 # the file COPYING, distributed as part of this software.
18 18 #-----------------------------------------------------------------------------
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Imports
22 22 #-----------------------------------------------------------------------------
23 23
24 24 from __future__ import with_statement
25 25
26 26 import json
27 27 import os
28 28 import stat
29 29 import sys
30 30
31 31 from multiprocessing import Process
32 32 from signal import signal, SIGINT, SIGABRT, SIGTERM
33 33
34 34 import zmq
35 35 from zmq.devices import ProcessMonitoredQueue
36 36 from zmq.log.handlers import PUBHandler
37 37
38 38 from IPython.core.profiledir import ProfileDir
39 39
40 40 from IPython.parallel.apps.baseapp import (
41 41 BaseParallelApplication,
42 42 base_aliases,
43 43 base_flags,
44 44 catch_config_error,
45 45 )
46 46 from IPython.utils.importstring import import_item
47 47 from IPython.utils.localinterfaces import LOCALHOST, PUBLIC_IPS
48 48 from IPython.utils.traitlets import Instance, Unicode, Bool, List, Dict, TraitError
49 49
50 50 from IPython.kernel.zmq.session import (
51 51 Session, session_aliases, session_flags, default_secure
52 52 )
53 53
54 54 from IPython.parallel.controller.heartmonitor import HeartMonitor
55 55 from IPython.parallel.controller.hub import HubFactory
56 56 from IPython.parallel.controller.scheduler import TaskScheduler,launch_scheduler
57 57 from IPython.parallel.controller.dictdb import DictDB
58 58
59 59 from IPython.parallel.util import split_url, disambiguate_url, set_hwm
60 60
61 61 # conditional import of SQLiteDB / MongoDB backend class
62 62 real_dbs = []
63 63
64 64 try:
65 65 from IPython.parallel.controller.sqlitedb import SQLiteDB
66 66 except ImportError:
67 67 pass
68 68 else:
69 69 real_dbs.append(SQLiteDB)
70 70
71 71 try:
72 72 from IPython.parallel.controller.mongodb import MongoDB
73 73 except ImportError:
74 74 pass
75 75 else:
76 76 real_dbs.append(MongoDB)
77 77
78 78
79 79
80 80 #-----------------------------------------------------------------------------
81 81 # Module level variables
82 82 #-----------------------------------------------------------------------------
83 83
84 84
85 85 #: The default config file name for this application
86 86 default_config_file_name = u'ipcontroller_config.py'
87 87
88 88
89 89 _description = """Start the IPython controller for parallel computing.
90 90
91 91 The IPython controller provides a gateway between the IPython engines and
92 92 clients. The controller needs to be started before the engines and can be
93 93 configured using command line options or using a cluster directory. Cluster
94 94 directories contain config, log and security files and are usually located in
95 95 your ipython directory and named as "profile_name". See the `profile`
96 96 and `profile-dir` options for details.
97 97 """
98 98
99 99 _examples = """
100 100 ipcontroller --ip=192.168.0.1 --port=1000 # listen on ip, port for engines
101 101 ipcontroller --scheme=pure # use the pure zeromq scheduler
102 102 """
103 103
104 104
105 105 #-----------------------------------------------------------------------------
106 106 # The main application
107 107 #-----------------------------------------------------------------------------
108 108 flags = {}
109 109 flags.update(base_flags)
110 110 flags.update({
111 111 'usethreads' : ( {'IPControllerApp' : {'use_threads' : True}},
112 112 'Use threads instead of processes for the schedulers'),
113 113 'sqlitedb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.sqlitedb.SQLiteDB'}},
114 114 'use the SQLiteDB backend'),
115 115 'mongodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.mongodb.MongoDB'}},
116 116 'use the MongoDB backend'),
117 117 'dictdb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.DictDB'}},
118 118 'use the in-memory DictDB backend'),
119 119 'nodb' : ({'HubFactory' : {'db_class' : 'IPython.parallel.controller.dictdb.NoDB'}},
120 120 """use dummy DB backend, which doesn't store any information.
121 121
122 122 This is the default as of IPython 0.13.
123 123
124 124 To enable delayed or repeated retrieval of results from the Hub,
125 125 select one of the true db backends.
126 126 """),
127 127 'reuse' : ({'IPControllerApp' : {'reuse_files' : True}},
128 128 'reuse existing json connection files'),
129 129 'restore' : ({'IPControllerApp' : {'restore_engines' : True, 'reuse_files' : True}},
130 130 'Attempt to restore engines from a JSON file. '
131 131 'For use when resuming a crashed controller'),
132 132 })
133 133
134 134 flags.update(session_flags)
135 135
136 136 aliases = dict(
137 137 ssh = 'IPControllerApp.ssh_server',
138 138 enginessh = 'IPControllerApp.engine_ssh_server',
139 139 location = 'IPControllerApp.location',
140 140
141 141 url = 'HubFactory.url',
142 142 ip = 'HubFactory.ip',
143 143 transport = 'HubFactory.transport',
144 144 port = 'HubFactory.regport',
145 145
146 146 ping = 'HeartMonitor.period',
147 147
148 148 scheme = 'TaskScheduler.scheme_name',
149 149 hwm = 'TaskScheduler.hwm',
150 150 )
151 151 aliases.update(base_aliases)
152 152 aliases.update(session_aliases)
153 153
154 154 class IPControllerApp(BaseParallelApplication):
155 155
156 156 name = u'ipcontroller'
157 157 description = _description
158 158 examples = _examples
159 159 config_file_name = Unicode(default_config_file_name)
160 160 classes = [ProfileDir, Session, HubFactory, TaskScheduler, HeartMonitor, DictDB] + real_dbs
161 161
162 162 # change default to True
163 163 auto_create = Bool(True, config=True,
164 164 help="""Whether to create profile dir if it doesn't exist.""")
165 165
166 166 reuse_files = Bool(False, config=True,
167 167 help="""Whether to reuse existing json connection files.
168 168 If False, connection files will be removed on a clean exit.
169 169 """
170 170 )
171 171 restore_engines = Bool(False, config=True,
172 172 help="""Reload engine state from JSON file
173 173 """
174 174 )
175 175 ssh_server = Unicode(u'', config=True,
176 176 help="""ssh url for clients to use when connecting to the Controller
177 177 processes. It should be of the form: [user@]server[:port]. The
178 178 Controller's listening addresses must be accessible from the ssh server""",
179 179 )
180 180 engine_ssh_server = Unicode(u'', config=True,
181 181 help="""ssh url for engines to use when connecting to the Controller
182 182 processes. It should be of the form: [user@]server[:port]. The
183 183 Controller's listening addresses must be accessible from the ssh server""",
184 184 )
185 185 location = Unicode(u'', config=True,
186 186 help="""The external IP or domain name of the Controller, used for disambiguating
187 187 engine and client connections.""",
188 188 )
189 189 import_statements = List([], config=True,
190 190 help="import statements to be run at startup. Necessary in some environments"
191 191 )
192 192
193 193 use_threads = Bool(False, config=True,
194 194 help='Use threads instead of processes for the schedulers',
195 195 )
196 196
197 197 engine_json_file = Unicode('ipcontroller-engine.json', config=True,
198 198 help="JSON filename where engine connection info will be stored.")
199 199 client_json_file = Unicode('ipcontroller-client.json', config=True,
200 200 help="JSON filename where client connection info will be stored.")
201 201
202 202 def _cluster_id_changed(self, name, old, new):
203 203 super(IPControllerApp, self)._cluster_id_changed(name, old, new)
204 204 self.engine_json_file = "%s-engine.json" % self.name
205 205 self.client_json_file = "%s-client.json" % self.name
206 206
207 207
208 208 # internal
209 209 children = List()
210 210 mq_class = Unicode('zmq.devices.ProcessMonitoredQueue')
211 211
212 212 def _use_threads_changed(self, name, old, new):
213 213 self.mq_class = 'zmq.devices.%sMonitoredQueue'%('Thread' if new else 'Process')
214 214
215 215 write_connection_files = Bool(True,
216 216 help="""Whether to write connection files to disk.
217 217 True in all cases other than runs with `reuse_files=True` *after the first*
218 218 """
219 219 )
220 220
221 221 aliases = Dict(aliases)
222 222 flags = Dict(flags)
223 223
224 224
225 225 def save_connection_dict(self, fname, cdict):
226 226 """save a connection dict to json file."""
227 227 c = self.config
228 228 url = cdict['registration']
229 229 location = cdict['location']
230 230
231 231 if not location:
232 232 if PUBLIC_IPS:
233 233 location = PUBLIC_IPS[-1]
234 234 else:
235 235 self.log.warn("Could not identify this machine's IP, assuming %s."
236 236 " You may need to specify '--location=<external_ip_address>' to help"
237 237 " IPython decide when to connect via loopback." % LOCALHOST)
238 238 location = LOCALHOST
239 239 cdict['location'] = location
240 240 fname = os.path.join(self.profile_dir.security_dir, fname)
241 241 self.log.info("writing connection info to %s", fname)
242 242 with open(fname, 'w') as f:
243 243 f.write(json.dumps(cdict, indent=2))
244 244 os.chmod(fname, stat.S_IRUSR|stat.S_IWUSR)
245 245
246 246 def load_config_from_json(self):
247 247 """load config from existing json connector files."""
248 248 c = self.config
249 249 self.log.debug("loading config from JSON")
250 250
251 251 # load engine config
252 252
253 253 fname = os.path.join(self.profile_dir.security_dir, self.engine_json_file)
254 254 self.log.info("loading connection info from %s", fname)
255 255 with open(fname) as f:
256 256 ecfg = json.loads(f.read())
257 257
258 258 # json gives unicode, Session.key wants bytes
259 259 c.Session.key = ecfg['exec_key'].encode('ascii')
260 260
261 261 xport,ip = ecfg['interface'].split('://')
262 262
263 263 c.HubFactory.engine_ip = ip
264 264 c.HubFactory.engine_transport = xport
265 265
266 266 self.location = ecfg['location']
267 267 if not self.engine_ssh_server:
268 268 self.engine_ssh_server = ecfg['ssh']
269 269
270 270 # load client config
271 271
272 272 fname = os.path.join(self.profile_dir.security_dir, self.client_json_file)
273 273 self.log.info("loading connection info from %s", fname)
274 274 with open(fname) as f:
275 275 ccfg = json.loads(f.read())
276 276
277 277 for key in ('exec_key', 'registration', 'pack', 'unpack'):
278 278 assert ccfg[key] == ecfg[key], "mismatch between engine and client info: %r" % key
279 279
280 280 xport,addr = ccfg['interface'].split('://')
281 281
282 282 c.HubFactory.client_transport = xport
283 283 c.HubFactory.client_ip = ip
284 284 if not self.ssh_server:
285 285 self.ssh_server = ccfg['ssh']
286 286
287 287 # load port config:
288 288 c.HubFactory.regport = ecfg['registration']
289 289 c.HubFactory.hb = (ecfg['hb_ping'], ecfg['hb_pong'])
290 290 c.HubFactory.control = (ccfg['control'], ecfg['control'])
291 291 c.HubFactory.mux = (ccfg['mux'], ecfg['mux'])
292 292 c.HubFactory.task = (ccfg['task'], ecfg['task'])
293 293 c.HubFactory.iopub = (ccfg['iopub'], ecfg['iopub'])
294 294 c.HubFactory.notifier_port = ccfg['notification']
295 295
296 296 def cleanup_connection_files(self):
297 297 if self.reuse_files:
298 298 self.log.debug("leaving JSON connection files for reuse")
299 299 return
300 300 self.log.debug("cleaning up JSON connection files")
301 301 for f in (self.client_json_file, self.engine_json_file):
302 302 f = os.path.join(self.profile_dir.security_dir, f)
303 303 try:
304 304 os.remove(f)
305 305 except Exception as e:
306 306 self.log.error("Failed to cleanup connection file: %s", e)
307 307 else:
308 308 self.log.debug(u"removed %s", f)
309 309
310 310 def load_secondary_config(self):
311 311 """secondary config, loading from JSON and setting defaults"""
312 312 if self.reuse_files:
313 313 try:
314 314 self.load_config_from_json()
315 315 except (AssertionError,IOError) as e:
316 316 self.log.error("Could not load config from JSON: %s" % e)
317 317 else:
318 318 # successfully loaded config from JSON, and reuse=True
319 319 # no need to write back the same file
320 320 self.write_connection_files = False
321 321
322 322 # switch Session.key default to secure
323 323 default_secure(self.config)
324 324 self.log.debug("Config changed")
325 325 self.log.debug(repr(self.config))
326 326
327 327 def init_hub(self):
328 328 c = self.config
329 329
330 330 self.do_import_statements()
331 331
332 332 try:
333 333 self.factory = HubFactory(config=c, log=self.log)
334 334 # self.start_logging()
335 335 self.factory.init_hub()
336 336 except TraitError:
337 337 raise
338 338 except Exception:
339 339 self.log.error("Couldn't construct the Controller", exc_info=True)
340 340 self.exit(1)
341 341
342 342 if self.write_connection_files:
343 343 # save to new json config files
344 344 f = self.factory
345 345 base = {
346 346 'exec_key' : f.session.key.decode('ascii'),
347 347 'location' : self.location,
348 348 'pack' : f.session.packer,
349 349 'unpack' : f.session.unpacker,
350 350 }
351 351
352 352 cdict = {'ssh' : self.ssh_server}
353 353 cdict.update(f.client_info)
354 354 cdict.update(base)
355 355 self.save_connection_dict(self.client_json_file, cdict)
356 356
357 357 edict = {'ssh' : self.engine_ssh_server}
358 358 edict.update(f.engine_info)
359 359 edict.update(base)
360 360 self.save_connection_dict(self.engine_json_file, edict)
361 361
362 362 fname = "engines%s.json" % self.cluster_id
363 363 self.factory.hub.engine_state_file = os.path.join(self.profile_dir.log_dir, fname)
364 364 if self.restore_engines:
365 365 self.factory.hub._load_engine_state()
366 366
367 367 def init_schedulers(self):
368 368 children = self.children
369 369 mq = import_item(str(self.mq_class))
370 370
371 371 f = self.factory
372 372 ident = f.session.bsession
373 373 # disambiguate url, in case of *
374 374 monitor_url = disambiguate_url(f.monitor_url)
375 375 # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
376 376 # IOPub relay (in a Process)
377 377 q = mq(zmq.PUB, zmq.SUB, zmq.PUB, b'N/A',b'iopub')
378 378 q.bind_in(f.client_url('iopub'))
379 379 q.setsockopt_in(zmq.IDENTITY, ident + b"_iopub")
380 380 q.bind_out(f.engine_url('iopub'))
381 381 q.setsockopt_out(zmq.SUBSCRIBE, b'')
382 382 q.connect_mon(monitor_url)
383 383 q.daemon=True
384 384 children.append(q)
385 385
386 386 # Multiplexer Queue (in a Process)
387 387 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
388 388
389 389 q.bind_in(f.client_url('mux'))
390 390 q.setsockopt_in(zmq.IDENTITY, b'mux_in')
391 391 q.bind_out(f.engine_url('mux'))
392 392 q.setsockopt_out(zmq.IDENTITY, b'mux_out')
393 393 q.connect_mon(monitor_url)
394 394 q.daemon=True
395 395 children.append(q)
396 396
397 397 # Control Queue (in a Process)
398 398 q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
399 399 q.bind_in(f.client_url('control'))
400 400 q.setsockopt_in(zmq.IDENTITY, b'control_in')
401 401 q.bind_out(f.engine_url('control'))
402 402 q.setsockopt_out(zmq.IDENTITY, b'control_out')
403 403 q.connect_mon(monitor_url)
404 404 q.daemon=True
405 405 children.append(q)
406 406 try:
407 407 scheme = self.config.TaskScheduler.scheme_name
408 408 except AttributeError:
409 409 scheme = TaskScheduler.scheme_name.get_default_value()
410 410 # Task Queue (in a Process)
411 411 if scheme == 'pure':
412 412 self.log.warn("task::using pure DEALER Task scheduler")
413 413 q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
414 414 # q.setsockopt_out(zmq.HWM, hub.hwm)
415 415 q.bind_in(f.client_url('task'))
416 416 q.setsockopt_in(zmq.IDENTITY, b'task_in')
417 417 q.bind_out(f.engine_url('task'))
418 418 q.setsockopt_out(zmq.IDENTITY, b'task_out')
419 419 q.connect_mon(monitor_url)
420 420 q.daemon=True
421 421 children.append(q)
422 422 elif scheme == 'none':
423 423 self.log.warn("task::using no Task scheduler")
424 424
425 425 else:
426 426 self.log.info("task::using Python %s Task scheduler"%scheme)
427 427 sargs = (f.client_url('task'), f.engine_url('task'),
428 428 monitor_url, disambiguate_url(f.client_url('notification')),
429 429 disambiguate_url(f.client_url('registration')),
430 430 )
431 431 kwargs = dict(logname='scheduler', loglevel=self.log_level,
432 432 log_url = self.log_url, config=dict(self.config))
433 433 if 'Process' in self.mq_class:
434 434 # run the Python scheduler in a Process
435 435 q = Process(target=launch_scheduler, args=sargs, kwargs=kwargs)
436 436 q.daemon=True
437 437 children.append(q)
438 438 else:
439 439 # single-threaded Controller
440 440 kwargs['in_thread'] = True
441 441 launch_scheduler(*sargs, **kwargs)
442 442
443 443 # set unlimited HWM for all relay devices
444 444 if hasattr(zmq, 'SNDHWM'):
445 445 q = children[0]
446 446 q.setsockopt_in(zmq.RCVHWM, 0)
447 447 q.setsockopt_out(zmq.SNDHWM, 0)
448 448
449 449 for q in children[1:]:
450 450 if not hasattr(q, 'setsockopt_in'):
451 451 continue
452 452 q.setsockopt_in(zmq.SNDHWM, 0)
453 453 q.setsockopt_in(zmq.RCVHWM, 0)
454 454 q.setsockopt_out(zmq.SNDHWM, 0)
455 455 q.setsockopt_out(zmq.RCVHWM, 0)
456 456 q.setsockopt_mon(zmq.SNDHWM, 0)
457 457
458 458
459 459 def terminate_children(self):
460 460 child_procs = []
461 461 for child in self.children:
462 462 if isinstance(child, ProcessMonitoredQueue):
463 463 child_procs.append(child.launcher)
464 464 elif isinstance(child, Process):
465 465 child_procs.append(child)
466 466 if child_procs:
467 467 self.log.critical("terminating children...")
468 468 for child in child_procs:
469 469 try:
470 470 child.terminate()
471 471 except OSError:
472 472 # already dead
473 473 pass
474 474
475 475 def handle_signal(self, sig, frame):
476 476 self.log.critical("Received signal %i, shutting down", sig)
477 477 self.terminate_children()
478 478 self.loop.stop()
479 479
480 480 def init_signal(self):
481 481 for sig in (SIGINT, SIGABRT, SIGTERM):
482 482 signal(sig, self.handle_signal)
483 483
484 484 def do_import_statements(self):
485 485 statements = self.import_statements
486 486 for s in statements:
487 487 try:
488 488 self.log.msg("Executing statement: '%s'" % s)
489 489 exec s in globals(), locals()
490 490 except:
491 491 self.log.msg("Error running statement: %s" % s)
492 492
493 493 def forward_logging(self):
494 494 if self.log_url:
495 495 self.log.info("Forwarding logging to %s"%self.log_url)
496 496 context = zmq.Context.instance()
497 497 lsock = context.socket(zmq.PUB)
498 498 lsock.connect(self.log_url)
499 499 handler = PUBHandler(lsock)
500 500 handler.root_topic = 'controller'
501 501 handler.setLevel(self.log_level)
502 502 self.log.addHandler(handler)
503 503
504 504 @catch_config_error
505 505 def initialize(self, argv=None):
506 506 super(IPControllerApp, self).initialize(argv)
507 507 self.forward_logging()
508 508 self.load_secondary_config()
509 509 self.init_hub()
510 510 self.init_schedulers()
511 511
512 512 def start(self):
513 513 # Start the subprocesses:
514 514 self.factory.start()
515 515 # children must be started before signals are setup,
516 516 # otherwise signal-handling will fire multiple times
517 517 for child in self.children:
518 518 child.start()
519 519 self.init_signal()
520 520
521 521 self.write_pid_file(overwrite=True)
522 522
523 523 try:
524 524 self.factory.loop.start()
525 525 except KeyboardInterrupt:
526 526 self.log.critical("Interrupted, Exiting...\n")
527 527 finally:
528 528 self.cleanup_connection_files()
529 529
530 530
531
532 def launch_new_instance():
531 def launch_new_instance(*args, **kwargs):
533 532 """Create and run the IPython controller"""
534 533 if sys.platform == 'win32':
535 534 # make sure we don't get called from a multiprocessing subprocess
536 535 # this can result in infinite Controllers being started on Windows
537 536 # which doesn't have a proper fork, so multiprocessing is wonky
538 537
539 538 # this only comes up when IPython has been installed using vanilla
540 539 # setuptools, and *not* distribute.
541 540 import multiprocessing
542 541 p = multiprocessing.current_process()
543 542 # the main process has name 'MainProcess'
544 543 # subprocesses will have names like 'Process-1'
545 544 if p.name != 'MainProcess':
546 545 # we are a subprocess, don't start another Controller!
547 546 return
548 app = IPControllerApp.instance()
549 app.initialize()
550 app.start()
547 return IPControllerApp.launch_new_instance(*args, **kwargs)
551 548
552 549
553 550 if __name__ == '__main__':
554 551 launch_new_instance()
@@ -1,400 +1,396 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 """
4 4 The IPython engine application
5 5
6 6 Authors:
7 7
8 8 * Brian Granger
9 9 * MinRK
10 10
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Copyright (C) 2008-2011 The IPython Development Team
15 15 #
16 16 # Distributed under the terms of the BSD License. The full license is in
17 17 # the file COPYING, distributed as part of this software.
18 18 #-----------------------------------------------------------------------------
19 19
20 20 #-----------------------------------------------------------------------------
21 21 # Imports
22 22 #-----------------------------------------------------------------------------
23 23
24 24 import json
25 25 import os
26 26 import sys
27 27 import time
28 28
29 29 import zmq
30 30 from zmq.eventloop import ioloop
31 31
32 32 from IPython.core.profiledir import ProfileDir
33 33 from IPython.parallel.apps.baseapp import (
34 34 BaseParallelApplication,
35 35 base_aliases,
36 36 base_flags,
37 37 catch_config_error,
38 38 )
39 39 from IPython.kernel.zmq.log import EnginePUBHandler
40 40 from IPython.kernel.zmq.ipkernel import Kernel
41 41 from IPython.kernel.zmq.kernelapp import IPKernelApp
42 42 from IPython.kernel.zmq.session import (
43 43 Session, session_aliases, session_flags
44 44 )
45 45 from IPython.kernel.zmq.zmqshell import ZMQInteractiveShell
46 46
47 47 from IPython.config.configurable import Configurable
48 48
49 49 from IPython.parallel.engine.engine import EngineFactory
50 50 from IPython.parallel.util import disambiguate_ip_address
51 51
52 52 from IPython.utils.importstring import import_item
53 53 from IPython.utils.py3compat import cast_bytes
54 54 from IPython.utils.traitlets import Bool, Unicode, Dict, List, Float, Instance
55 55
56 56
57 57 #-----------------------------------------------------------------------------
58 58 # Module level variables
59 59 #-----------------------------------------------------------------------------
60 60
61 61 #: The default config file name for this application
62 62 default_config_file_name = u'ipengine_config.py'
63 63
64 64 _description = """Start an IPython engine for parallel computing.
65 65
66 66 IPython engines run in parallel and perform computations on behalf of a client
67 67 and controller. A controller needs to be started before the engines. The
68 68 engine can be configured using command line options or using a cluster
69 69 directory. Cluster directories contain config, log and security files and are
70 70 usually located in your ipython directory and named as "profile_name".
71 71 See the `profile` and `profile-dir` options for details.
72 72 """
73 73
74 74 _examples = """
75 75 ipengine --ip=192.168.0.1 --port=1000 # connect to hub at ip and port
76 76 ipengine --log-to-file --log-level=DEBUG # log to a file with DEBUG verbosity
77 77 """
78 78
79 79 #-----------------------------------------------------------------------------
80 80 # MPI configuration
81 81 #-----------------------------------------------------------------------------
82 82
83 83 mpi4py_init = """from mpi4py import MPI as mpi
84 84 mpi.size = mpi.COMM_WORLD.Get_size()
85 85 mpi.rank = mpi.COMM_WORLD.Get_rank()
86 86 """
87 87
88 88
89 89 pytrilinos_init = """from PyTrilinos import Epetra
90 90 class SimpleStruct:
91 91 pass
92 92 mpi = SimpleStruct()
93 93 mpi.rank = 0
94 94 mpi.size = 0
95 95 """
96 96
97 97 class MPI(Configurable):
98 98 """Configurable for MPI initialization"""
99 99 use = Unicode('', config=True,
100 100 help='How to enable MPI (mpi4py, pytrilinos, or empty string to disable).'
101 101 )
102 102
103 103 def _use_changed(self, name, old, new):
104 104 # load default init script if it's not set
105 105 if not self.init_script:
106 106 self.init_script = self.default_inits.get(new, '')
107 107
108 108 init_script = Unicode('', config=True,
109 109 help="Initialization code for MPI")
110 110
111 111 default_inits = Dict({'mpi4py' : mpi4py_init, 'pytrilinos':pytrilinos_init},
112 112 config=True)
113 113
114 114
115 115 #-----------------------------------------------------------------------------
116 116 # Main application
117 117 #-----------------------------------------------------------------------------
118 118 aliases = dict(
119 119 file = 'IPEngineApp.url_file',
120 120 c = 'IPEngineApp.startup_command',
121 121 s = 'IPEngineApp.startup_script',
122 122
123 123 url = 'EngineFactory.url',
124 124 ssh = 'EngineFactory.sshserver',
125 125 sshkey = 'EngineFactory.sshkey',
126 126 ip = 'EngineFactory.ip',
127 127 transport = 'EngineFactory.transport',
128 128 port = 'EngineFactory.regport',
129 129 location = 'EngineFactory.location',
130 130
131 131 timeout = 'EngineFactory.timeout',
132 132
133 133 mpi = 'MPI.use',
134 134
135 135 )
136 136 aliases.update(base_aliases)
137 137 aliases.update(session_aliases)
138 138 flags = {}
139 139 flags.update(base_flags)
140 140 flags.update(session_flags)
141 141
142 142 class IPEngineApp(BaseParallelApplication):
143 143
144 144 name = 'ipengine'
145 145 description = _description
146 146 examples = _examples
147 147 config_file_name = Unicode(default_config_file_name)
148 148 classes = List([ZMQInteractiveShell, ProfileDir, Session, EngineFactory, Kernel, MPI])
149 149
150 150 startup_script = Unicode(u'', config=True,
151 151 help='specify a script to be run at startup')
152 152 startup_command = Unicode('', config=True,
153 153 help='specify a command to be run at startup')
154 154
155 155 url_file = Unicode(u'', config=True,
156 156 help="""The full location of the file containing the connection information for
157 157 the controller. If this is not given, the file must be in the
158 158 security directory of the cluster directory. This location is
159 159 resolved using the `profile` or `profile_dir` options.""",
160 160 )
161 161 wait_for_url_file = Float(5, config=True,
162 162 help="""The maximum number of seconds to wait for url_file to exist.
163 163 This is useful for batch-systems and shared-filesystems where the
164 164 controller and engine are started at the same time and it
165 165 may take a moment for the controller to write the connector files.""")
166 166
167 167 url_file_name = Unicode(u'ipcontroller-engine.json', config=True)
168 168
169 169 def _cluster_id_changed(self, name, old, new):
170 170 if new:
171 171 base = 'ipcontroller-%s' % new
172 172 else:
173 173 base = 'ipcontroller'
174 174 self.url_file_name = "%s-engine.json" % base
175 175
176 176 log_url = Unicode('', config=True,
177 177 help="""The URL for the iploggerapp instance, for forwarding
178 178 logging to a central location.""")
179 179
180 180 # an IPKernelApp instance, used to setup listening for shell frontends
181 181 kernel_app = Instance(IPKernelApp)
182 182
183 183 aliases = Dict(aliases)
184 184 flags = Dict(flags)
185 185
186 186 @property
187 187 def kernel(self):
188 188 """allow access to the Kernel object, so I look like IPKernelApp"""
189 189 return self.engine.kernel
190 190
191 191 def find_url_file(self):
192 192 """Set the url file.
193 193
194 194 Here we don't try to actually see if it exists or is valid as that
195 195 is handled by the connection logic.
196 196 """
197 197 config = self.config
198 198 # Find the actual controller key file
199 199 if not self.url_file:
200 200 self.url_file = os.path.join(
201 201 self.profile_dir.security_dir,
202 202 self.url_file_name
203 203 )
204 204
205 205 def load_connector_file(self):
206 206 """load config from a JSON connector file,
207 207 at a *lower* priority than command-line/config files.
208 208 """
209 209
210 210 self.log.info("Loading url_file %r", self.url_file)
211 211 config = self.config
212 212
213 213 with open(self.url_file) as f:
214 214 d = json.loads(f.read())
215 215
216 216 # allow hand-override of location for disambiguation
217 217 # and ssh-server
218 218 try:
219 219 config.EngineFactory.location
220 220 except AttributeError:
221 221 config.EngineFactory.location = d['location']
222 222
223 223 try:
224 224 config.EngineFactory.sshserver
225 225 except AttributeError:
226 226 config.EngineFactory.sshserver = d.get('ssh')
227 227
228 228 location = config.EngineFactory.location
229 229
230 230 proto, ip = d['interface'].split('://')
231 231 ip = disambiguate_ip_address(ip, location)
232 232 d['interface'] = '%s://%s' % (proto, ip)
233 233
234 234 # DO NOT allow override of basic URLs, serialization, or exec_key
235 235 # JSON file takes top priority there
236 236 config.Session.key = cast_bytes(d['exec_key'])
237 237
238 238 config.EngineFactory.url = d['interface'] + ':%i' % d['registration']
239 239
240 240 config.Session.packer = d['pack']
241 241 config.Session.unpacker = d['unpack']
242 242
243 243 self.log.debug("Config changed:")
244 244 self.log.debug("%r", config)
245 245 self.connection_info = d
246 246
247 247 def bind_kernel(self, **kwargs):
248 248 """Promote engine to listening kernel, accessible to frontends."""
249 249 if self.kernel_app is not None:
250 250 return
251 251
252 252 self.log.info("Opening ports for direct connections as an IPython kernel")
253 253
254 254 kernel = self.kernel
255 255
256 256 kwargs.setdefault('config', self.config)
257 257 kwargs.setdefault('log', self.log)
258 258 kwargs.setdefault('profile_dir', self.profile_dir)
259 259 kwargs.setdefault('session', self.engine.session)
260 260
261 261 app = self.kernel_app = IPKernelApp(**kwargs)
262 262
263 263 # allow IPKernelApp.instance():
264 264 IPKernelApp._instance = app
265 265
266 266 app.init_connection_file()
267 267 # relevant contents of init_sockets:
268 268
269 269 app.shell_port = app._bind_socket(kernel.shell_streams[0], app.shell_port)
270 270 app.log.debug("shell ROUTER Channel on port: %i", app.shell_port)
271 271
272 272 app.iopub_port = app._bind_socket(kernel.iopub_socket, app.iopub_port)
273 273 app.log.debug("iopub PUB Channel on port: %i", app.iopub_port)
274 274
275 275 kernel.stdin_socket = self.engine.context.socket(zmq.ROUTER)
276 276 app.stdin_port = app._bind_socket(kernel.stdin_socket, app.stdin_port)
277 277 app.log.debug("stdin ROUTER Channel on port: %i", app.stdin_port)
278 278
279 279 # start the heartbeat, and log connection info:
280 280
281 281 app.init_heartbeat()
282 282
283 283 app.log_connection_info()
284 284 app.write_connection_file()
285 285
286 286
287 287 def init_engine(self):
288 288 # This is the working dir by now.
289 289 sys.path.insert(0, '')
290 290 config = self.config
291 291 # print config
292 292 self.find_url_file()
293 293
294 294 # was the url manually specified?
295 295 keys = set(self.config.EngineFactory.keys())
296 296 keys = keys.union(set(self.config.RegistrationFactory.keys()))
297 297
298 298 if keys.intersection(set(['ip', 'url', 'port'])):
299 299 # Connection info was specified, don't wait for the file
300 300 url_specified = True
301 301 self.wait_for_url_file = 0
302 302 else:
303 303 url_specified = False
304 304
305 305 if self.wait_for_url_file and not os.path.exists(self.url_file):
306 306 self.log.warn("url_file %r not found", self.url_file)
307 307 self.log.warn("Waiting up to %.1f seconds for it to arrive.", self.wait_for_url_file)
308 308 tic = time.time()
309 309 while not os.path.exists(self.url_file) and (time.time()-tic < self.wait_for_url_file):
310 310 # wait for url_file to exist, or until time limit
311 311 time.sleep(0.1)
312 312
313 313 if os.path.exists(self.url_file):
314 314 self.load_connector_file()
315 315 elif not url_specified:
316 316 self.log.fatal("Fatal: url file never arrived: %s", self.url_file)
317 317 self.exit(1)
318 318
319 319
320 320 try:
321 321 exec_lines = config.IPKernelApp.exec_lines
322 322 except AttributeError:
323 323 try:
324 324 exec_lines = config.InteractiveShellApp.exec_lines
325 325 except AttributeError:
326 326 exec_lines = config.IPKernelApp.exec_lines = []
327 327 try:
328 328 exec_files = config.IPKernelApp.exec_files
329 329 except AttributeError:
330 330 try:
331 331 exec_files = config.InteractiveShellApp.exec_files
332 332 except AttributeError:
333 333 exec_files = config.IPKernelApp.exec_files = []
334 334
335 335 if self.startup_script:
336 336 exec_files.append(self.startup_script)
337 337 if self.startup_command:
338 338 exec_lines.append(self.startup_command)
339 339
340 340 # Create the underlying shell class and Engine
341 341 # shell_class = import_item(self.master_config.Global.shell_class)
342 342 # print self.config
343 343 try:
344 344 self.engine = EngineFactory(config=config, log=self.log,
345 345 connection_info=self.connection_info,
346 346 )
347 347 except:
348 348 self.log.error("Couldn't start the Engine", exc_info=True)
349 349 self.exit(1)
350 350
351 351 def forward_logging(self):
352 352 if self.log_url:
353 353 self.log.info("Forwarding logging to %s", self.log_url)
354 354 context = self.engine.context
355 355 lsock = context.socket(zmq.PUB)
356 356 lsock.connect(self.log_url)
357 357 handler = EnginePUBHandler(self.engine, lsock)
358 358 handler.setLevel(self.log_level)
359 359 self.log.addHandler(handler)
360 360
361 361 def init_mpi(self):
362 362 global mpi
363 363 self.mpi = MPI(parent=self)
364 364
365 365 mpi_import_statement = self.mpi.init_script
366 366 if mpi_import_statement:
367 367 try:
368 368 self.log.info("Initializing MPI:")
369 369 self.log.info(mpi_import_statement)
370 370 exec mpi_import_statement in globals()
371 371 except:
372 372 mpi = None
373 373 else:
374 374 mpi = None
375 375
376 376 @catch_config_error
377 377 def initialize(self, argv=None):
378 378 super(IPEngineApp, self).initialize(argv)
379 379 self.init_mpi()
380 380 self.init_engine()
381 381 self.forward_logging()
382 382
383 383 def start(self):
384 384 self.engine.start()
385 385 try:
386 386 self.engine.loop.start()
387 387 except KeyboardInterrupt:
388 388 self.log.critical("Engine Interrupted, shutting down...\n")
389 389
390 390
391 def launch_new_instance():
392 """Create and run the IPython engine"""
393 app = IPEngineApp.instance()
394 app.initialize()
395 app.start()
391 launch_new_instance = IPEngineApp.launch_new_instance
396 392
397 393
398 394 if __name__ == '__main__':
399 395 launch_new_instance()
400 396
@@ -1,103 +1,99 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 """
4 4 A simple IPython logger application
5 5
6 6 Authors:
7 7
8 8 * MinRK
9 9
10 10 """
11 11
12 12 #-----------------------------------------------------------------------------
13 13 # Copyright (C) 2011 The IPython Development Team
14 14 #
15 15 # Distributed under the terms of the BSD License. The full license is in
16 16 # the file COPYING, distributed as part of this software.
17 17 #-----------------------------------------------------------------------------
18 18
19 19 #-----------------------------------------------------------------------------
20 20 # Imports
21 21 #-----------------------------------------------------------------------------
22 22
23 23 import os
24 24 import sys
25 25
26 26 import zmq
27 27
28 28 from IPython.core.profiledir import ProfileDir
29 29 from IPython.utils.traitlets import Bool, Dict, Unicode
30 30
31 31 from IPython.parallel.apps.baseapp import (
32 32 BaseParallelApplication,
33 33 base_aliases,
34 34 catch_config_error,
35 35 )
36 36 from IPython.parallel.apps.logwatcher import LogWatcher
37 37
38 38 #-----------------------------------------------------------------------------
39 39 # Module level variables
40 40 #-----------------------------------------------------------------------------
41 41
42 42 #: The default config file name for this application
43 43 default_config_file_name = u'iplogger_config.py'
44 44
45 45 _description = """Start an IPython logger for parallel computing.
46 46
47 47 IPython controllers and engines (and your own processes) can broadcast log messages
48 48 by registering a `zmq.log.handlers.PUBHandler` with the `logging` module. The
49 49 logger can be configured using command line options or using a cluster
50 50 directory. Cluster directories contain config, log and security files and are
51 51 usually located in your ipython directory and named as "profile_name".
52 52 See the `profile` and `profile-dir` options for details.
53 53 """
54 54
55 55
56 56 #-----------------------------------------------------------------------------
57 57 # Main application
58 58 #-----------------------------------------------------------------------------
59 59 aliases = {}
60 60 aliases.update(base_aliases)
61 61 aliases.update(dict(url='LogWatcher.url', topics='LogWatcher.topics'))
62 62
63 63 class IPLoggerApp(BaseParallelApplication):
64 64
65 65 name = u'iplogger'
66 66 description = _description
67 67 config_file_name = Unicode(default_config_file_name)
68 68
69 69 classes = [LogWatcher, ProfileDir]
70 70 aliases = Dict(aliases)
71 71
72 72 @catch_config_error
73 73 def initialize(self, argv=None):
74 74 super(IPLoggerApp, self).initialize(argv)
75 75 self.init_watcher()
76 76
77 77 def init_watcher(self):
78 78 try:
79 79 self.watcher = LogWatcher(parent=self, log=self.log)
80 80 except:
81 81 self.log.error("Couldn't start the LogWatcher", exc_info=True)
82 82 self.exit(1)
83 83 self.log.info("Listening for log messages on %r"%self.watcher.url)
84 84
85 85
86 86 def start(self):
87 87 self.watcher.start()
88 88 try:
89 89 self.watcher.loop.start()
90 90 except KeyboardInterrupt:
91 91 self.log.critical("Logging Interrupted, shutting down...\n")
92 92
93 93
94 def launch_new_instance():
95 """Create and run the IPython LogWatcher"""
96 app = IPLoggerApp.instance()
97 app.initialize()
98 app.start()
94 launch_new_instance = IPLoggerApp.launch_new_instance
99 95
100 96
101 97 if __name__ == '__main__':
102 98 launch_new_instance()
103 99
@@ -1,152 +1,149 b''
1 1 """ A minimal application using the ZMQ-based terminal IPython frontend.
2 2
3 3 This is not a complete console app, as subprocess will not be able to receive
4 4 input, there is no real readline support, among other limitations.
5 5
6 6 Authors:
7 7
8 8 * Min RK
9 9 * Paul Ivanov
10 10
11 11 """
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 import signal
17 17
18 18 from IPython.terminal.ipapp import TerminalIPythonApp, frontend_flags as term_flags
19 19
20 20 from IPython.utils.traitlets import (
21 21 Dict, Any
22 22 )
23 23 from IPython.utils.warn import error
24 24
25 25 from IPython.consoleapp import (
26 26 IPythonConsoleApp, app_aliases, app_flags, aliases, flags
27 27 )
28 28
29 29 from IPython.terminal.console.interactiveshell import ZMQTerminalInteractiveShell
30 30
31 31 #-----------------------------------------------------------------------------
32 32 # Globals
33 33 #-----------------------------------------------------------------------------
34 34
35 35 _examples = """
36 36 ipython console # start the ZMQ-based console
37 37 ipython console --existing # connect to an existing ipython session
38 38 """
39 39
40 40 #-----------------------------------------------------------------------------
41 41 # Flags and Aliases
42 42 #-----------------------------------------------------------------------------
43 43
44 44 # copy flags from mixin:
45 45 flags = dict(flags)
46 46 # start with mixin frontend flags:
47 47 frontend_flags = dict(app_flags)
48 48 # add TerminalIPApp flags:
49 49 frontend_flags.update(term_flags)
50 50 # disable quick startup, as it won't propagate to the kernel anyway
51 51 frontend_flags.pop('quick')
52 52 # update full dict with frontend flags:
53 53 flags.update(frontend_flags)
54 54
55 55 # copy flags from mixin
56 56 aliases = dict(aliases)
57 57 # start with mixin frontend flags
58 58 frontend_aliases = dict(app_aliases)
59 59 # load updated frontend flags into full dict
60 60 aliases.update(frontend_aliases)
61 61
62 62 # get flags&aliases into sets, and remove a couple that
63 63 # shouldn't be scrubbed from backend flags:
64 64 frontend_aliases = set(frontend_aliases.keys())
65 65 frontend_flags = set(frontend_flags.keys())
66 66
67 67
68 68 #-----------------------------------------------------------------------------
69 69 # Classes
70 70 #-----------------------------------------------------------------------------
71 71
72 72
73 73 class ZMQTerminalIPythonApp(TerminalIPythonApp, IPythonConsoleApp):
74 74 name = "ipython-console"
75 75 """Start a terminal frontend to the IPython zmq kernel."""
76 76
77 77 description = """
78 78 The IPython terminal-based Console.
79 79
80 80 This launches a Console application inside a terminal.
81 81
82 82 The Console supports various extra features beyond the traditional
83 83 single-process Terminal IPython shell, such as connecting to an
84 84 existing ipython session, via:
85 85
86 86 ipython console --existing
87 87
88 88 where the previous session could have been created by another ipython
89 89 console, an ipython qtconsole, or by opening an ipython notebook.
90 90
91 91 """
92 92 examples = _examples
93 93
94 94 classes = [ZMQTerminalInteractiveShell] + IPythonConsoleApp.classes
95 95 flags = Dict(flags)
96 96 aliases = Dict(aliases)
97 97 frontend_aliases = Any(frontend_aliases)
98 98 frontend_flags = Any(frontend_flags)
99 99
100 100 subcommands = Dict()
101 101
102 102 def parse_command_line(self, argv=None):
103 103 super(ZMQTerminalIPythonApp, self).parse_command_line(argv)
104 104 self.build_kernel_argv(argv)
105 105
106 106 def init_shell(self):
107 107 IPythonConsoleApp.initialize(self)
108 108 # relay sigint to kernel
109 109 signal.signal(signal.SIGINT, self.handle_sigint)
110 110 self.shell = ZMQTerminalInteractiveShell.instance(parent=self,
111 111 display_banner=False, profile_dir=self.profile_dir,
112 112 ipython_dir=self.ipython_dir,
113 113 manager=self.kernel_manager,
114 114 client=self.kernel_client,
115 115 )
116 116
117 117 def init_gui_pylab(self):
118 118 # no-op, because we don't want to import matplotlib in the frontend.
119 119 pass
120 120
121 121 def handle_sigint(self, *args):
122 122 if self.shell._executing:
123 123 if self.kernel_manager:
124 124 # interrupt already gets passed to subprocess by signal handler.
125 125 # Only if we prevent that should we need to explicitly call
126 126 # interrupt_kernel, until which time, this would result in a
127 127 # double-interrupt:
128 128 # self.kernel_manager.interrupt_kernel()
129 129 pass
130 130 else:
131 131 self.shell.write_err('\n')
132 132 error("Cannot interrupt kernels we didn't start.\n")
133 133 else:
134 134 # raise the KeyboardInterrupt if we aren't waiting for execution,
135 135 # so that the interact loop advances, and prompt is redrawn, etc.
136 136 raise KeyboardInterrupt
137 137
138 138
139 139 def init_code(self):
140 140 # no-op in the frontend, code gets run in the backend
141 141 pass
142 142
143 def launch_new_instance():
144 """Create and run a full blown IPython instance"""
145 app = ZMQTerminalIPythonApp.instance()
146 app.initialize()
147 app.start()
143
144 launch_new_instance = ZMQTerminalIPythonApp.launch_new_instance
148 145
149 146
150 147 if __name__ == '__main__':
151 148 launch_new_instance()
152 149
@@ -1,400 +1,396 b''
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3 """
4 4 The :class:`~IPython.core.application.Application` object for the command
5 5 line :command:`ipython` program.
6 6
7 7 Authors
8 8 -------
9 9
10 10 * Brian Granger
11 11 * Fernando Perez
12 12 * Min Ragan-Kelley
13 13 """
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Copyright (C) 2008-2011 The IPython Development Team
17 17 #
18 18 # Distributed under the terms of the BSD License. The full license is in
19 19 # the file COPYING, distributed as part of this software.
20 20 #-----------------------------------------------------------------------------
21 21
22 22 #-----------------------------------------------------------------------------
23 23 # Imports
24 24 #-----------------------------------------------------------------------------
25 25
26 26 from __future__ import absolute_import
27 27
28 28 import logging
29 29 import os
30 30 import sys
31 31
32 32 from IPython.config.loader import (
33 33 Config, PyFileConfigLoader, ConfigFileNotFound
34 34 )
35 35 from IPython.config.application import boolean_flag, catch_config_error
36 36 from IPython.core import release
37 37 from IPython.core import usage
38 38 from IPython.core.completer import IPCompleter
39 39 from IPython.core.crashhandler import CrashHandler
40 40 from IPython.core.formatters import PlainTextFormatter
41 41 from IPython.core.history import HistoryManager
42 42 from IPython.core.prompts import PromptManager
43 43 from IPython.core.application import (
44 44 ProfileDir, BaseIPythonApplication, base_flags, base_aliases
45 45 )
46 46 from IPython.core.magics import ScriptMagics
47 47 from IPython.core.shellapp import (
48 48 InteractiveShellApp, shell_flags, shell_aliases
49 49 )
50 50 from IPython.terminal.interactiveshell import TerminalInteractiveShell
51 51 from IPython.utils import warn
52 52 from IPython.utils.path import get_ipython_dir, check_for_old_config
53 53 from IPython.utils.traitlets import (
54 54 Bool, List, Dict,
55 55 )
56 56
57 57 #-----------------------------------------------------------------------------
58 58 # Globals, utilities and helpers
59 59 #-----------------------------------------------------------------------------
60 60
61 61 #: The default config file name for this application.
62 62 default_config_file_name = u'ipython_config.py'
63 63
64 64 _examples = """
65 65 ipython --pylab # start in pylab mode
66 66 ipython --pylab=qt # start in pylab mode with the qt4 backend
67 67 ipython --log-level=DEBUG # set logging to DEBUG
68 68 ipython --profile=foo # start with profile foo
69 69
70 70 ipython qtconsole # start the qtconsole GUI application
71 71 ipython help qtconsole # show the help for the qtconsole subcmd
72 72
73 73 ipython console # start the terminal-based console application
74 74 ipython help console # show the help for the console subcmd
75 75
76 76 ipython notebook # start the IPython notebook
77 77 ipython help notebook # show the help for the notebook subcmd
78 78
79 79 ipython profile create foo # create profile foo w/ default config files
80 80 ipython help profile # show the help for the profile subcmd
81 81
82 82 ipython locate # print the path to the IPython directory
83 83 ipython locate profile foo # print the path to the directory for profile `foo`
84 84
85 85 ipython nbconvert # convert notebooks to/from other formats
86 86 """
87 87
88 88 #-----------------------------------------------------------------------------
89 89 # Crash handler for this application
90 90 #-----------------------------------------------------------------------------
91 91
92 92 class IPAppCrashHandler(CrashHandler):
93 93 """sys.excepthook for IPython itself, leaves a detailed report on disk."""
94 94
95 95 def __init__(self, app):
96 96 contact_name = release.author
97 97 contact_email = release.author_email
98 98 bug_tracker = 'https://github.com/ipython/ipython/issues'
99 99 super(IPAppCrashHandler,self).__init__(
100 100 app, contact_name, contact_email, bug_tracker
101 101 )
102 102
103 103 def make_report(self,traceback):
104 104 """Return a string containing a crash report."""
105 105
106 106 sec_sep = self.section_sep
107 107 # Start with parent report
108 108 report = [super(IPAppCrashHandler, self).make_report(traceback)]
109 109 # Add interactive-specific info we may have
110 110 rpt_add = report.append
111 111 try:
112 112 rpt_add(sec_sep+"History of session input:")
113 113 for line in self.app.shell.user_ns['_ih']:
114 114 rpt_add(line)
115 115 rpt_add('\n*** Last line of input (may not be in above history):\n')
116 116 rpt_add(self.app.shell._last_input_line+'\n')
117 117 except:
118 118 pass
119 119
120 120 return ''.join(report)
121 121
122 122 #-----------------------------------------------------------------------------
123 123 # Aliases and Flags
124 124 #-----------------------------------------------------------------------------
125 125 flags = dict(base_flags)
126 126 flags.update(shell_flags)
127 127 frontend_flags = {}
128 128 addflag = lambda *args: frontend_flags.update(boolean_flag(*args))
129 129 addflag('autoedit-syntax', 'TerminalInteractiveShell.autoedit_syntax',
130 130 'Turn on auto editing of files with syntax errors.',
131 131 'Turn off auto editing of files with syntax errors.'
132 132 )
133 133 addflag('banner', 'TerminalIPythonApp.display_banner',
134 134 "Display a banner upon starting IPython.",
135 135 "Don't display a banner upon starting IPython."
136 136 )
137 137 addflag('confirm-exit', 'TerminalInteractiveShell.confirm_exit',
138 138 """Set to confirm when you try to exit IPython with an EOF (Control-D
139 139 in Unix, Control-Z/Enter in Windows). By typing 'exit' or 'quit',
140 140 you can force a direct exit without any confirmation.""",
141 141 "Don't prompt the user when exiting."
142 142 )
143 143 addflag('term-title', 'TerminalInteractiveShell.term_title',
144 144 "Enable auto setting the terminal title.",
145 145 "Disable auto setting the terminal title."
146 146 )
147 147 classic_config = Config()
148 148 classic_config.InteractiveShell.cache_size = 0
149 149 classic_config.PlainTextFormatter.pprint = False
150 150 classic_config.PromptManager.in_template = '>>> '
151 151 classic_config.PromptManager.in2_template = '... '
152 152 classic_config.PromptManager.out_template = ''
153 153 classic_config.InteractiveShell.separate_in = ''
154 154 classic_config.InteractiveShell.separate_out = ''
155 155 classic_config.InteractiveShell.separate_out2 = ''
156 156 classic_config.InteractiveShell.colors = 'NoColor'
157 157 classic_config.InteractiveShell.xmode = 'Plain'
158 158
159 159 frontend_flags['classic']=(
160 160 classic_config,
161 161 "Gives IPython a similar feel to the classic Python prompt."
162 162 )
163 163 # # log doesn't make so much sense this way anymore
164 164 # paa('--log','-l',
165 165 # action='store_true', dest='InteractiveShell.logstart',
166 166 # help="Start logging to the default log file (./ipython_log.py).")
167 167 #
168 168 # # quick is harder to implement
169 169 frontend_flags['quick']=(
170 170 {'TerminalIPythonApp' : {'quick' : True}},
171 171 "Enable quick startup with no config files."
172 172 )
173 173
174 174 frontend_flags['i'] = (
175 175 {'TerminalIPythonApp' : {'force_interact' : True}},
176 176 """If running code from the command line, become interactive afterwards.
177 177 Note: can also be given simply as '-i.'"""
178 178 )
179 179 flags.update(frontend_flags)
180 180
181 181 aliases = dict(base_aliases)
182 182 aliases.update(shell_aliases)
183 183
184 184 #-----------------------------------------------------------------------------
185 185 # Main classes and functions
186 186 #-----------------------------------------------------------------------------
187 187
188 188
189 189 class LocateIPythonApp(BaseIPythonApplication):
190 190 description = """print the path to the IPython dir"""
191 191 subcommands = Dict(dict(
192 192 profile=('IPython.core.profileapp.ProfileLocate',
193 193 "print the path to an IPython profile directory",
194 194 ),
195 195 ))
196 196 def start(self):
197 197 if self.subapp is not None:
198 198 return self.subapp.start()
199 199 else:
200 200 print self.ipython_dir
201 201
202 202
203 203 class TerminalIPythonApp(BaseIPythonApplication, InteractiveShellApp):
204 204 name = u'ipython'
205 205 description = usage.cl_usage
206 206 default_config_file_name = default_config_file_name
207 207 crash_handler_class = IPAppCrashHandler
208 208 examples = _examples
209 209
210 210 flags = Dict(flags)
211 211 aliases = Dict(aliases)
212 212 classes = List()
213 213 def _classes_default(self):
214 214 """This has to be in a method, for TerminalIPythonApp to be available."""
215 215 return [
216 216 InteractiveShellApp, # ShellApp comes before TerminalApp, because
217 217 self.__class__, # it will also affect subclasses (e.g. QtConsole)
218 218 TerminalInteractiveShell,
219 219 PromptManager,
220 220 HistoryManager,
221 221 ProfileDir,
222 222 PlainTextFormatter,
223 223 IPCompleter,
224 224 ScriptMagics,
225 225 ]
226 226
227 227 subcommands = Dict(dict(
228 228 qtconsole=('IPython.qt.console.qtconsoleapp.IPythonQtConsoleApp',
229 229 """Launch the IPython Qt Console."""
230 230 ),
231 231 notebook=('IPython.html.notebookapp.NotebookApp',
232 232 """Launch the IPython HTML Notebook Server."""
233 233 ),
234 234 profile = ("IPython.core.profileapp.ProfileApp",
235 235 "Create and manage IPython profiles."
236 236 ),
237 237 kernel = ("IPython.kernel.zmq.kernelapp.IPKernelApp",
238 238 "Start a kernel without an attached frontend."
239 239 ),
240 240 console=('IPython.terminal.console.app.ZMQTerminalIPythonApp',
241 241 """Launch the IPython terminal-based Console."""
242 242 ),
243 243 locate=('IPython.terminal.ipapp.LocateIPythonApp',
244 244 LocateIPythonApp.description
245 245 ),
246 246 history=('IPython.core.historyapp.HistoryApp',
247 247 "Manage the IPython history database."
248 248 ),
249 249 nbconvert=('IPython.nbconvert.nbconvertapp.NbConvertApp',
250 250 "Convert notebooks to/from other formats."
251 251 ),
252 252 ))
253 253
254 254 # *do* autocreate requested profile, but don't create the config file.
255 255 auto_create=Bool(True)
256 256 # configurables
257 257 ignore_old_config=Bool(False, config=True,
258 258 help="Suppress warning messages about legacy config files"
259 259 )
260 260 quick = Bool(False, config=True,
261 261 help="""Start IPython quickly by skipping the loading of config files."""
262 262 )
263 263 def _quick_changed(self, name, old, new):
264 264 if new:
265 265 self.load_config_file = lambda *a, **kw: None
266 266 self.ignore_old_config=True
267 267
268 268 display_banner = Bool(True, config=True,
269 269 help="Whether to display a banner upon starting IPython."
270 270 )
271 271
272 272 # if there is code of files to run from the cmd line, don't interact
273 273 # unless the --i flag (App.force_interact) is true.
274 274 force_interact = Bool(False, config=True,
275 275 help="""If a command or file is given via the command-line,
276 276 e.g. 'ipython foo.py"""
277 277 )
278 278 def _force_interact_changed(self, name, old, new):
279 279 if new:
280 280 self.interact = True
281 281
282 282 def _file_to_run_changed(self, name, old, new):
283 283 if new:
284 284 self.something_to_run = True
285 285 if new and not self.force_interact:
286 286 self.interact = False
287 287 _code_to_run_changed = _file_to_run_changed
288 288 _module_to_run_changed = _file_to_run_changed
289 289
290 290 # internal, not-configurable
291 291 interact=Bool(True)
292 292 something_to_run=Bool(False)
293 293
294 294 def parse_command_line(self, argv=None):
295 295 """override to allow old '-pylab' flag with deprecation warning"""
296 296
297 297 argv = sys.argv[1:] if argv is None else argv
298 298
299 299 if '-pylab' in argv:
300 300 # deprecated `-pylab` given,
301 301 # warn and transform into current syntax
302 302 argv = argv[:] # copy, don't clobber
303 303 idx = argv.index('-pylab')
304 304 warn.warn("`-pylab` flag has been deprecated.\n"
305 305 " Use `--pylab` instead, or `--pylab=foo` to specify a backend.")
306 306 sub = '--pylab'
307 307 if len(argv) > idx+1:
308 308 # check for gui arg, as in '-pylab qt'
309 309 gui = argv[idx+1]
310 310 if gui in ('wx', 'qt', 'qt4', 'gtk', 'auto'):
311 311 sub = '--pylab='+gui
312 312 argv.pop(idx+1)
313 313 argv[idx] = sub
314 314
315 315 return super(TerminalIPythonApp, self).parse_command_line(argv)
316 316
317 317 @catch_config_error
318 318 def initialize(self, argv=None):
319 319 """Do actions after construct, but before starting the app."""
320 320 super(TerminalIPythonApp, self).initialize(argv)
321 321 if self.subapp is not None:
322 322 # don't bother initializing further, starting subapp
323 323 return
324 324 if not self.ignore_old_config:
325 325 check_for_old_config(self.ipython_dir)
326 326 # print self.extra_args
327 327 if self.extra_args and not self.something_to_run:
328 328 self.file_to_run = self.extra_args[0]
329 329 self.init_path()
330 330 # create the shell
331 331 self.init_shell()
332 332 # and draw the banner
333 333 self.init_banner()
334 334 # Now a variety of things that happen after the banner is printed.
335 335 self.init_gui_pylab()
336 336 self.init_extensions()
337 337 self.init_code()
338 338
339 339 def init_shell(self):
340 340 """initialize the InteractiveShell instance"""
341 341 # Create an InteractiveShell instance.
342 342 # shell.display_banner should always be False for the terminal
343 343 # based app, because we call shell.show_banner() by hand below
344 344 # so the banner shows *before* all extension loading stuff.
345 345 self.shell = TerminalInteractiveShell.instance(parent=self,
346 346 display_banner=False, profile_dir=self.profile_dir,
347 347 ipython_dir=self.ipython_dir)
348 348 self.shell.configurables.append(self)
349 349
350 350 def init_banner(self):
351 351 """optionally display the banner"""
352 352 if self.display_banner and self.interact:
353 353 self.shell.show_banner()
354 354 # Make sure there is a space below the banner.
355 355 if self.log_level <= logging.INFO: print
356 356
357 357 def _pylab_changed(self, name, old, new):
358 358 """Replace --pylab='inline' with --pylab='auto'"""
359 359 if new == 'inline':
360 360 warn.warn("'inline' not available as pylab backend, "
361 361 "using 'auto' instead.")
362 362 self.pylab = 'auto'
363 363
364 364 def start(self):
365 365 if self.subapp is not None:
366 366 return self.subapp.start()
367 367 # perform any prexec steps:
368 368 if self.interact:
369 369 self.log.debug("Starting IPython's mainloop...")
370 370 self.shell.mainloop()
371 371 else:
372 372 self.log.debug("IPython not interactive...")
373 373
374 374
375 375 def load_default_config(ipython_dir=None):
376 376 """Load the default config file from the default ipython_dir.
377 377
378 378 This is useful for embedded shells.
379 379 """
380 380 if ipython_dir is None:
381 381 ipython_dir = get_ipython_dir()
382 382 profile_dir = os.path.join(ipython_dir, 'profile_default')
383 383 cl = PyFileConfigLoader(default_config_file_name, profile_dir)
384 384 try:
385 385 config = cl.load_config()
386 386 except ConfigFileNotFound:
387 387 # no config found
388 388 config = Config()
389 389 return config
390 390
391 391
392 def launch_new_instance():
393 """Create and run a full blown IPython instance"""
394 app = TerminalIPythonApp.instance()
395 app.initialize()
396 app.start()
392 launch_new_instance = TerminalIPythonApp.launch_new_instance
397 393
398 394
399 395 if __name__ == '__main__':
400 396 launch_new_instance()
General Comments 0
You need to be logged in to leave comments. Login now