remove user_variables...
MinRK -
@@ -1,3223 +1,3199 @@
1 1 # -*- coding: utf-8 -*-
2 2 """Main IPython class."""
3 3
4 4 #-----------------------------------------------------------------------------
5 5 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de>
6 6 # Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu>
7 7 # Copyright (C) 2008-2011 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16
17 17 from __future__ import absolute_import
18 18 from __future__ import print_function
19 19
20 20 import __future__
21 21 import abc
22 22 import ast
23 23 import atexit
24 24 import functools
25 25 import os
26 26 import re
27 27 import runpy
28 28 import sys
29 29 import tempfile
30 30 import types
31 31 import subprocess
32 32 from io import open as io_open
33 33
34 34 from IPython.config.configurable import SingletonConfigurable
35 35 from IPython.core import debugger, oinspect
36 36 from IPython.core import magic
37 37 from IPython.core import page
38 38 from IPython.core import prefilter
39 39 from IPython.core import shadowns
40 40 from IPython.core import ultratb
41 41 from IPython.core.alias import AliasManager, AliasError
42 42 from IPython.core.autocall import ExitAutocall
43 43 from IPython.core.builtin_trap import BuiltinTrap
44 44 from IPython.core.events import EventManager, available_events
45 45 from IPython.core.compilerop import CachingCompiler, check_linecache_ipython
46 46 from IPython.core.display_trap import DisplayTrap
47 47 from IPython.core.displayhook import DisplayHook
48 48 from IPython.core.displaypub import DisplayPublisher
49 49 from IPython.core.error import UsageError
50 50 from IPython.core.extensions import ExtensionManager
51 51 from IPython.core.formatters import DisplayFormatter
52 52 from IPython.core.history import HistoryManager
53 53 from IPython.core.inputsplitter import IPythonInputSplitter, ESC_MAGIC, ESC_MAGIC2
54 54 from IPython.core.logger import Logger
55 55 from IPython.core.macro import Macro
56 56 from IPython.core.payload import PayloadManager
57 57 from IPython.core.prefilter import PrefilterManager
58 58 from IPython.core.profiledir import ProfileDir
59 59 from IPython.core.prompts import PromptManager
60 60 from IPython.lib.latextools import LaTeXTool
61 61 from IPython.testing.skipdoctest import skip_doctest
62 62 from IPython.utils import PyColorize
63 63 from IPython.utils import io
64 64 from IPython.utils import py3compat
65 65 from IPython.utils import openpy
66 66 from IPython.utils.decorators import undoc
67 67 from IPython.utils.io import ask_yes_no
68 68 from IPython.utils.ipstruct import Struct
69 69 from IPython.utils.path import get_home_dir, get_ipython_dir, get_py_filename, unquote_filename, ensure_dir_exists
70 70 from IPython.utils.pickleshare import PickleShareDB
71 71 from IPython.utils.process import system, getoutput
72 72 from IPython.utils.py3compat import (builtin_mod, unicode_type, string_types,
73 73 with_metaclass, iteritems)
74 74 from IPython.utils.strdispatch import StrDispatch
75 75 from IPython.utils.syspathcontext import prepended_to_syspath
76 76 from IPython.utils.text import (format_screen, LSString, SList,
77 77 DollarFormatter)
78 78 from IPython.utils.traitlets import (Integer, CBool, CaselessStrEnum, Enum,
79 79 List, Unicode, Instance, Type)
80 80 from IPython.utils.warn import warn, error
81 81 import IPython.core.hooks
82 82
83 83 #-----------------------------------------------------------------------------
84 84 # Globals
85 85 #-----------------------------------------------------------------------------
86 86
87 87 # compiled regexps for autoindent management
88 88 dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass')
89 89
90 90 #-----------------------------------------------------------------------------
91 91 # Utilities
92 92 #-----------------------------------------------------------------------------
93 93
94 94 @undoc
95 95 def softspace(file, newvalue):
96 96 """Copied from code.py, to remove the dependency"""
97 97
98 98 oldvalue = 0
99 99 try:
100 100 oldvalue = file.softspace
101 101 except AttributeError:
102 102 pass
103 103 try:
104 104 file.softspace = newvalue
105 105 except (AttributeError, TypeError):
106 106 # "attribute-less object" or "read-only attributes"
107 107 pass
108 108 return oldvalue
109 109
110 110 @undoc
111 111 def no_op(*a, **kw): pass
112 112
113 113 @undoc
114 114 class NoOpContext(object):
115 115 def __enter__(self): pass
116 116 def __exit__(self, type, value, traceback): pass
117 117 no_op_context = NoOpContext()
118 118
119 119 class SpaceInInput(Exception): pass
120 120
121 121 @undoc
122 122 class Bunch: pass
123 123
124 124
125 125 def get_default_colors():
126 126 if sys.platform=='darwin':
127 127 return "LightBG"
128 128 elif os.name=='nt':
129 129 return 'Linux'
130 130 else:
131 131 return 'Linux'
132 132
133 133
134 134 class SeparateUnicode(Unicode):
135 135 r"""A Unicode subclass to validate separate_in, separate_out, etc.
136 136
137 137 This is a Unicode based trait that converts '0'->'' and ``'\\n'->'\n'``.
138 138 """
139 139
140 140 def validate(self, obj, value):
141 141 if value == '0': value = ''
142 142 value = value.replace('\\n','\n')
143 143 return super(SeparateUnicode, self).validate(obj, value)
144 144
145 145
146 146 class ReadlineNoRecord(object):
147 147 """Context manager to execute some code, then reload readline history
148 148 so that interactive input to the code doesn't appear when pressing up."""
149 149 def __init__(self, shell):
150 150 self.shell = shell
151 151 self._nested_level = 0
152 152
153 153 def __enter__(self):
154 154 if self._nested_level == 0:
155 155 try:
156 156 self.orig_length = self.current_length()
157 157 self.readline_tail = self.get_readline_tail()
158 158 except (AttributeError, IndexError): # Can fail with pyreadline
159 159 self.orig_length, self.readline_tail = 999999, []
160 160 self._nested_level += 1
161 161
162 162 def __exit__(self, type, value, traceback):
163 163 self._nested_level -= 1
164 164 if self._nested_level == 0:
165 165 # Try clipping the end if it's got longer
166 166 try:
167 167 e = self.current_length() - self.orig_length
168 168 if e > 0:
169 169 for _ in range(e):
170 170 self.shell.readline.remove_history_item(self.orig_length)
171 171
172 172 # If it still doesn't match, just reload readline history.
173 173 if self.current_length() != self.orig_length \
174 174 or self.get_readline_tail() != self.readline_tail:
175 175 self.shell.refill_readline_hist()
176 176 except (AttributeError, IndexError):
177 177 pass
178 178 # Returning False will cause exceptions to propagate
179 179 return False
180 180
181 181 def current_length(self):
182 182 return self.shell.readline.get_current_history_length()
183 183
184 184 def get_readline_tail(self, n=10):
185 185 """Get the last n items in readline history."""
186 186 end = self.shell.readline.get_current_history_length() + 1
187 187 start = max(end-n, 1)
188 188 ghi = self.shell.readline.get_history_item
189 189 return [ghi(x) for x in range(start, end)]
190 190
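A brief usage sketch of ReadlineNoRecord (hedged: it assumes an interactive IPython session with readline available; the body of the with-block is a placeholder):

    ip = get_ipython()
    with ReadlineNoRecord(ip):
        # code that prompts the user interactively goes here; whatever is
        # typed inside this block is not kept in readline history afterwards
        pass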
191 191
192 192 @undoc
193 193 class DummyMod(object):
194 194 """A dummy module used for IPython's interactive module when
195 195 a namespace must be assigned to the module's __dict__."""
196 196 pass
197 197
198 198 #-----------------------------------------------------------------------------
199 199 # Main IPython class
200 200 #-----------------------------------------------------------------------------
201 201
202 202 class InteractiveShell(SingletonConfigurable):
203 203 """An enhanced, interactive shell for Python."""
204 204
205 205 _instance = None
206 206
207 207 ast_transformers = List([], config=True, help=
208 208 """
209 209 A list of ast.NodeTransformer subclass instances, which will be applied
210 210 to user input before code is run.
211 211 """
212 212 )
213 213
214 214 autocall = Enum((0,1,2), default_value=0, config=True, help=
215 215 """
216 216 Make IPython automatically call any callable object even if you didn't
217 217 type explicit parentheses. For example, 'str 43' becomes 'str(43)'
218 218 automatically. The value can be '0' to disable the feature, '1' for
219 219 'smart' autocall, where it is not applied if there are no more
220 220 arguments on the line, and '2' for 'full' autocall, where all callable
221 221 objects are automatically called (even if no arguments are present).
222 222 """
223 223 )
224 224 # TODO: remove all autoindent logic and put into frontends.
225 225 # We can't do this yet because even runlines uses the autoindent.
226 226 autoindent = CBool(True, config=True, help=
227 227 """
228 228 Autoindent IPython code entered interactively.
229 229 """
230 230 )
231 231 automagic = CBool(True, config=True, help=
232 232 """
233 233 Enable magic commands to be called without the leading %.
234 234 """
235 235 )
236 236 cache_size = Integer(1000, config=True, help=
237 237 """
238 238 Set the size of the output cache. The default is 1000; you can
239 239 change it permanently in your config file. Setting it to 0 completely
240 240 disables the caching system, and the minimum value accepted is 20 (if
241 241 you provide a value less than 20, it is reset to 0 and a warning is
242 242 issued). This limit is defined because otherwise you'll spend more
243 243 time re-flushing a cache that is too small than working.
244 244 """
245 245 )
246 246 color_info = CBool(True, config=True, help=
247 247 """
248 248 Use colors for displaying information about objects. Because this
249 249 information is passed through a pager (like 'less'), and some pagers
250 250 get confused with color codes, this capability can be turned off.
251 251 """
252 252 )
253 253 colors = CaselessStrEnum(('NoColor','LightBG','Linux'),
254 254 default_value=get_default_colors(), config=True,
255 255 help="Set the color scheme (NoColor, Linux, or LightBG)."
256 256 )
257 257 colors_force = CBool(False, help=
258 258 """
259 259 Force use of ANSI color codes, regardless of OS and readline
260 260 availability.
261 261 """
262 262 # FIXME: This is essentially a hack to allow ZMQShell to show colors
263 263 # without readline on Win32. When the ZMQ formatting system is
264 264 # refactored, this should be removed.
265 265 )
266 266 debug = CBool(False, config=True)
267 267 deep_reload = CBool(False, config=True, help=
268 268 """
269 269 Enable deep (recursive) reloading by default. IPython can use the
270 270 deep_reload module which reloads changes in modules recursively (it
271 271 replaces the reload() function, so you don't need to change anything to
272 272 use it). deep_reload() forces a full reload of modules whose code may
273 273 have changed, which the default reload() function does not. When
274 274 deep_reload is off, IPython will use the normal reload(), but
275 275 deep_reload will still be available as dreload().
276 276 """
277 277 )
278 278 disable_failing_post_execute = CBool(False, config=True,
279 279 help="Don't call post-execute functions that have failed in the past."
280 280 )
281 281 display_formatter = Instance(DisplayFormatter)
282 282 displayhook_class = Type(DisplayHook)
283 283 display_pub_class = Type(DisplayPublisher)
284 284 data_pub_class = None
285 285
286 286 exit_now = CBool(False)
287 287 exiter = Instance(ExitAutocall)
288 288 def _exiter_default(self):
289 289 return ExitAutocall(self)
290 290 # Monotonically increasing execution counter
291 291 execution_count = Integer(1)
292 292 filename = Unicode("<ipython console>")
293 293 ipython_dir= Unicode('', config=True) # Set to get_ipython_dir() in __init__
294 294
295 295 # Input splitter, to transform input line by line and detect when a block
296 296 # is ready to be executed.
297 297 input_splitter = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
298 298 (), {'line_input_checker': True})
299 299
300 300 # This InputSplitter instance is used to transform completed cells before
301 301 # running them. It allows cell magics to contain blank lines.
302 302 input_transformer_manager = Instance('IPython.core.inputsplitter.IPythonInputSplitter',
303 303 (), {'line_input_checker': False})
304 304
305 305 logstart = CBool(False, config=True, help=
306 306 """
307 307 Start logging to the default log file.
308 308 """
309 309 )
310 310 logfile = Unicode('', config=True, help=
311 311 """
312 312 The name of the logfile to use.
313 313 """
314 314 )
315 315 logappend = Unicode('', config=True, help=
316 316 """
317 317 Start logging to the given file in append mode.
318 318 """
319 319 )
320 320 object_info_string_level = Enum((0,1,2), default_value=0,
321 321 config=True)
322 322 pdb = CBool(False, config=True, help=
323 323 """
324 324 Automatically call the pdb debugger after every exception.
325 325 """
326 326 )
327 327 multiline_history = CBool(sys.platform != 'win32', config=True,
328 328 help="Save multi-line entries as one entry in readline history"
329 329 )
330 330
331 331 # deprecated prompt traits:
332 332
333 333 prompt_in1 = Unicode('In [\\#]: ', config=True,
334 334 help="Deprecated, use PromptManager.in_template")
335 335 prompt_in2 = Unicode(' .\\D.: ', config=True,
336 336 help="Deprecated, use PromptManager.in2_template")
337 337 prompt_out = Unicode('Out[\\#]: ', config=True,
338 338 help="Deprecated, use PromptManager.out_template")
339 339 prompts_pad_left = CBool(True, config=True,
340 340 help="Deprecated, use PromptManager.justify")
341 341
342 342 def _prompt_trait_changed(self, name, old, new):
343 343 table = {
344 344 'prompt_in1' : 'in_template',
345 345 'prompt_in2' : 'in2_template',
346 346 'prompt_out' : 'out_template',
347 347 'prompts_pad_left' : 'justify',
348 348 }
349 349 warn("InteractiveShell.{name} is deprecated, use PromptManager.{newname}".format(
350 350 name=name, newname=table[name])
351 351 )
352 352 # protect against weird cases where self.config may not exist:
353 353 if self.config is not None:
354 354 # propagate to corresponding PromptManager trait
355 355 setattr(self.config.PromptManager, table[name], new)
356 356
357 357 _prompt_in1_changed = _prompt_trait_changed
358 358 _prompt_in2_changed = _prompt_trait_changed
359 359 _prompt_out_changed = _prompt_trait_changed
360 360 _prompts_pad_left_changed = _prompt_trait_changed
361 361
362 362 show_rewritten_input = CBool(True, config=True,
363 363 help="Show rewritten input, e.g. for autocall."
364 364 )
365 365
366 366 quiet = CBool(False, config=True)
367 367
368 368 history_length = Integer(10000, config=True)
369 369
370 370 # The readline stuff will eventually be moved to the terminal subclass
371 371 # but for now, we can't do that as readline is welded in everywhere.
372 372 readline_use = CBool(True, config=True)
373 373 readline_remove_delims = Unicode('-/~', config=True)
374 374 readline_delims = Unicode() # set by init_readline()
375 375 # don't use \M- bindings by default, because they
376 376 # conflict with 8-bit encodings. See gh-58,gh-88
377 377 readline_parse_and_bind = List([
378 378 'tab: complete',
379 379 '"\C-l": clear-screen',
380 380 'set show-all-if-ambiguous on',
381 381 '"\C-o": tab-insert',
382 382 '"\C-r": reverse-search-history',
383 383 '"\C-s": forward-search-history',
384 384 '"\C-p": history-search-backward',
385 385 '"\C-n": history-search-forward',
386 386 '"\e[A": history-search-backward',
387 387 '"\e[B": history-search-forward',
388 388 '"\C-k": kill-line',
389 389 '"\C-u": unix-line-discard',
390 390 ], allow_none=False, config=True)
391 391
392 392 ast_node_interactivity = Enum(['all', 'last', 'last_expr', 'none'],
393 393 default_value='last_expr', config=True,
394 394 help="""
395 395 'all', 'last', 'last_expr' or 'none', specifying which nodes should be
396 396 run interactively (displaying output from expressions).""")
397 397
398 398 # TODO: this part of prompt management should be moved to the frontends.
399 399 # Use custom TraitTypes that convert '0'->'' and '\\n'->'\n'
400 400 separate_in = SeparateUnicode('\n', config=True)
401 401 separate_out = SeparateUnicode('', config=True)
402 402 separate_out2 = SeparateUnicode('', config=True)
403 403 wildcards_case_sensitive = CBool(True, config=True)
404 404 xmode = CaselessStrEnum(('Context','Plain', 'Verbose'),
405 405 default_value='Context', config=True)
406 406
407 407 # Subcomponents of InteractiveShell
408 408 alias_manager = Instance('IPython.core.alias.AliasManager')
409 409 prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager')
410 410 builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap')
411 411 display_trap = Instance('IPython.core.display_trap.DisplayTrap')
412 412 extension_manager = Instance('IPython.core.extensions.ExtensionManager')
413 413 payload_manager = Instance('IPython.core.payload.PayloadManager')
414 414 history_manager = Instance('IPython.core.history.HistoryManager')
415 415 magics_manager = Instance('IPython.core.magic.MagicsManager')
416 416
417 417 profile_dir = Instance('IPython.core.application.ProfileDir')
418 418 @property
419 419 def profile(self):
420 420 if self.profile_dir is not None:
421 421 name = os.path.basename(self.profile_dir.location)
422 422 return name.replace('profile_','')
423 423
424 424
425 425 # Private interface
426 426 _post_execute = Instance(dict)
427 427
428 428 # Tracks any GUI loop loaded for pylab
429 429 pylab_gui_select = None
430 430
431 431 def __init__(self, ipython_dir=None, profile_dir=None,
432 432 user_module=None, user_ns=None,
433 433 custom_exceptions=((), None), **kwargs):
434 434
435 435 # This is where traits with a config_key argument are updated
436 436 # from the values on config.
437 437 super(InteractiveShell, self).__init__(**kwargs)
438 438 self.configurables = [self]
439 439
440 440 # These are relatively independent and stateless
441 441 self.init_ipython_dir(ipython_dir)
442 442 self.init_profile_dir(profile_dir)
443 443 self.init_instance_attrs()
444 444 self.init_environment()
445 445
446 446 # Check if we're in a virtualenv, and set up sys.path.
447 447 self.init_virtualenv()
448 448
449 449 # Create namespaces (user_ns, user_global_ns, etc.)
450 450 self.init_create_namespaces(user_module, user_ns)
451 451 # This has to be done after init_create_namespaces because it uses
452 452 # something in self.user_ns, but before init_sys_modules, which
453 453 # is the first thing to modify sys.
454 454 # TODO: When we override sys.stdout and sys.stderr before this class
455 455 # is created, we are saving the overridden ones here. Not sure if this
456 456 # is what we want to do.
457 457 self.save_sys_module_state()
458 458 self.init_sys_modules()
459 459
460 460 # While we're trying to have each part of the code directly access what
461 461 # it needs without keeping redundant references to objects, we have too
462 462 # much legacy code that expects ip.db to exist.
463 463 self.db = PickleShareDB(os.path.join(self.profile_dir.location, 'db'))
464 464
465 465 self.init_history()
466 466 self.init_encoding()
467 467 self.init_prefilter()
468 468
469 469 self.init_syntax_highlighting()
470 470 self.init_hooks()
471 471 self.init_events()
472 472 self.init_pushd_popd_magic()
473 473 # self.init_traceback_handlers used to be here, but we moved it below
474 474 # because it and init_io have to come after init_readline.
475 475 self.init_user_ns()
476 476 self.init_logger()
477 477 self.init_builtins()
478 478
479 479 # The following was in post_config_initialization
480 480 self.init_inspector()
481 481 # init_readline() must come before init_io(), because init_io uses
482 482 # readline related things.
483 483 self.init_readline()
484 484 # We save this here in case user code replaces raw_input, but it needs
485 485 # to be after init_readline(), because PyPy's readline works by replacing
486 486 # raw_input.
487 487 if py3compat.PY3:
488 488 self.raw_input_original = input
489 489 else:
490 490 self.raw_input_original = raw_input
491 491 # init_completer must come after init_readline, because it needs to
492 492 # know whether readline is present or not system-wide to configure the
493 493 # completers, since the completion machinery can now operate
494 494 # independently of readline (e.g. over the network)
495 495 self.init_completer()
496 496 # TODO: init_io() needs to happen before init_traceback handlers
497 497 # because the traceback handlers hardcode the stdout/stderr streams.
498 498 # This logic is in debugger.Pdb and should eventually be changed.
499 499 self.init_io()
500 500 self.init_traceback_handlers(custom_exceptions)
501 501 self.init_prompts()
502 502 self.init_display_formatter()
503 503 self.init_display_pub()
504 504 self.init_data_pub()
505 505 self.init_displayhook()
506 506 self.init_latextool()
507 507 self.init_magics()
508 508 self.init_alias()
509 509 self.init_logstart()
510 510 self.init_pdb()
511 511 self.init_extension_manager()
512 512 self.init_payload()
513 513 self.init_comms()
514 514 self.hooks.late_startup_hook()
515 515 self.events.trigger('shell_initialized', self)
516 516 atexit.register(self.atexit_operations)
517 517
518 518 def get_ipython(self):
519 519 """Return the currently running IPython instance."""
520 520 return self
521 521
522 522 #-------------------------------------------------------------------------
523 523 # Trait changed handlers
524 524 #-------------------------------------------------------------------------
525 525
526 526 def _ipython_dir_changed(self, name, new):
527 527 ensure_dir_exists(new)
528 528
529 529 def set_autoindent(self,value=None):
530 530 """Set the autoindent flag, checking for readline support.
531 531
532 532 If called with no arguments, it acts as a toggle."""
533 533
534 534 if value != 0 and not self.has_readline:
535 535 if os.name == 'posix':
536 536 warn("The auto-indent feature requires the readline library")
537 537 self.autoindent = 0
538 538 return
539 539 if value is None:
540 540 self.autoindent = not self.autoindent
541 541 else:
542 542 self.autoindent = value
543 543
544 544 #-------------------------------------------------------------------------
545 545 # init_* methods called by __init__
546 546 #-------------------------------------------------------------------------
547 547
548 548 def init_ipython_dir(self, ipython_dir):
549 549 if ipython_dir is not None:
550 550 self.ipython_dir = ipython_dir
551 551 return
552 552
553 553 self.ipython_dir = get_ipython_dir()
554 554
555 555 def init_profile_dir(self, profile_dir):
556 556 if profile_dir is not None:
557 557 self.profile_dir = profile_dir
558 558 return
559 559 self.profile_dir =\
560 560 ProfileDir.create_profile_dir_by_name(self.ipython_dir, 'default')
561 561
562 562 def init_instance_attrs(self):
563 563 self.more = False
564 564
565 565 # command compiler
566 566 self.compile = CachingCompiler()
567 567
568 568 # Make an empty namespace, which extension writers can rely on both
569 569 # existing and NEVER being used by ipython itself. This gives them a
570 570 # convenient location for storing additional information and state
571 571 # their extensions may require, without fear of collisions with other
572 572 # ipython names that may develop later.
573 573 self.meta = Struct()
574 574
575 575 # Temporary files used for various purposes. Deleted at exit.
576 576 self.tempfiles = []
577 577 self.tempdirs = []
578 578
579 579 # Keep track of readline usage (later set by init_readline)
580 580 self.has_readline = False
581 581
582 582 # keep track of where we started running (mainly for crash post-mortem)
583 583 # This is not being used anywhere currently.
584 584 self.starting_dir = py3compat.getcwd()
585 585
586 586 # Indentation management
587 587 self.indent_current_nsp = 0
588 588
589 589 # Dict to track post-execution functions that have been registered
590 590 self._post_execute = {}
591 591
592 592 def init_environment(self):
593 593 """Any changes we need to make to the user's environment."""
594 594 pass
595 595
596 596 def init_encoding(self):
597 597 # Get system encoding at startup time. Certain terminals (like Emacs
598 598 # under Win32) have it set to None, and we need to have a known valid
599 599 # encoding to use in the raw_input() method
600 600 try:
601 601 self.stdin_encoding = sys.stdin.encoding or 'ascii'
602 602 except AttributeError:
603 603 self.stdin_encoding = 'ascii'
604 604
605 605 def init_syntax_highlighting(self):
606 606 # Python source parser/formatter for syntax highlighting
607 607 pyformat = PyColorize.Parser().format
608 608 self.pycolorize = lambda src: pyformat(src,'str',self.colors)
609 609
610 610 def init_pushd_popd_magic(self):
611 611 # for pushd/popd management
612 612 self.home_dir = get_home_dir()
613 613
614 614 self.dir_stack = []
615 615
616 616 def init_logger(self):
617 617 self.logger = Logger(self.home_dir, logfname='ipython_log.py',
618 618 logmode='rotate')
619 619
620 620 def init_logstart(self):
621 621 """Initialize logging in case it was requested at the command line.
622 622 """
623 623 if self.logappend:
624 624 self.magic('logstart %s append' % self.logappend)
625 625 elif self.logfile:
626 626 self.magic('logstart %s' % self.logfile)
627 627 elif self.logstart:
628 628 self.magic('logstart')
629 629
630 630 def init_builtins(self):
631 631 # A single, static flag that we set to True. Its presence indicates
632 632 # that an IPython shell has been created, and we make no attempts at
633 633 # removing on exit or representing the existence of more than one
634 634 # IPython at a time.
635 635 builtin_mod.__dict__['__IPYTHON__'] = True
636 636
637 637 # In 0.11 we introduced '__IPYTHON__active' as an integer we'd try to
638 638 # manage on enter/exit, but with all our shells it's virtually
639 639 # impossible to get all the cases right. We're leaving the name in for
640 640 # those who adapted their code to check for this flag, but will
641 641 # eventually remove it after a few more releases.
642 642 builtin_mod.__dict__['__IPYTHON__active'] = \
643 643 'Deprecated, check for __IPYTHON__'
644 644
645 645 self.builtin_trap = BuiltinTrap(shell=self)
646 646
647 647 def init_inspector(self):
648 648 # Object inspector
649 649 self.inspector = oinspect.Inspector(oinspect.InspectColors,
650 650 PyColorize.ANSICodeColors,
651 651 'NoColor',
652 652 self.object_info_string_level)
653 653
654 654 def init_io(self):
655 655 # This will just use sys.stdout and sys.stderr. If you want to
656 656 # override sys.stdout and sys.stderr themselves, you need to do that
657 657 # *before* instantiating this class, because io holds onto
658 658 # references to the underlying streams.
659 659 if (sys.platform == 'win32' or sys.platform == 'cli') and self.has_readline:
660 660 io.stdout = io.stderr = io.IOStream(self.readline._outputfile)
661 661 else:
662 662 io.stdout = io.IOStream(sys.stdout)
663 663 io.stderr = io.IOStream(sys.stderr)
664 664
665 665 def init_prompts(self):
666 666 self.prompt_manager = PromptManager(shell=self, parent=self)
667 667 self.configurables.append(self.prompt_manager)
668 668 # Set system prompts, so that scripts can decide if they are running
669 669 # interactively.
670 670 sys.ps1 = 'In : '
671 671 sys.ps2 = '...: '
672 672 sys.ps3 = 'Out: '
673 673
674 674 def init_display_formatter(self):
675 675 self.display_formatter = DisplayFormatter(parent=self)
676 676 self.configurables.append(self.display_formatter)
677 677
678 678 def init_display_pub(self):
679 679 self.display_pub = self.display_pub_class(parent=self)
680 680 self.configurables.append(self.display_pub)
681 681
682 682 def init_data_pub(self):
683 683 if not self.data_pub_class:
684 684 self.data_pub = None
685 685 return
686 686 self.data_pub = self.data_pub_class(parent=self)
687 687 self.configurables.append(self.data_pub)
688 688
689 689 def init_displayhook(self):
690 690 # Initialize displayhook, set in/out prompts and printing system
691 691 self.displayhook = self.displayhook_class(
692 692 parent=self,
693 693 shell=self,
694 694 cache_size=self.cache_size,
695 695 )
696 696 self.configurables.append(self.displayhook)
697 697 # This is a context manager that installs/removes the displayhook at
698 698 # the appropriate time.
699 699 self.display_trap = DisplayTrap(hook=self.displayhook)
700 700
701 701 def init_latextool(self):
702 702 """Configure LaTeXTool."""
703 703 cfg = LaTeXTool.instance(parent=self)
704 704 if cfg not in self.configurables:
705 705 self.configurables.append(cfg)
706 706
707 707 def init_virtualenv(self):
708 708 """Add a virtualenv to sys.path so the user can import modules from it.
709 709 This isn't perfect: it doesn't use the Python interpreter with which the
710 710 virtualenv was built, and it ignores the --no-site-packages option. A
711 711 warning will appear suggesting that the user install IPython in the
712 712 virtualenv, but for many cases, it probably works well enough.
713 713
714 714 Adapted from code snippets online.
715 715
716 716 http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
717 717 """
718 718 if 'VIRTUAL_ENV' not in os.environ:
719 719 # Not in a virtualenv
720 720 return
721 721
722 722 # venv detection:
723 723 # stdlib venv may symlink sys.executable, so we can't use realpath.
724 724 # but others can symlink *to* the venv Python, so we can't just use sys.executable.
725 725 # So we just check every item in the symlink tree (generally <= 3)
726 726 p = sys.executable
727 727 paths = [p]
728 728 while os.path.islink(p):
729 729 p = os.path.join(os.path.dirname(p), os.readlink(p))
730 730 paths.append(p)
731 731 if any(p.startswith(os.environ['VIRTUAL_ENV']) for p in paths):
732 732 # Running properly in the virtualenv, don't need to do anything
733 733 return
734 734
735 735 warn("Attempting to work in a virtualenv. If you encounter problems, please "
736 736 "install IPython inside the virtualenv.")
737 737 if sys.platform == "win32":
738 738 virtual_env = os.path.join(os.environ['VIRTUAL_ENV'], 'Lib', 'site-packages')
739 739 else:
740 740 virtual_env = os.path.join(os.environ['VIRTUAL_ENV'], 'lib',
741 741 'python%d.%d' % sys.version_info[:2], 'site-packages')
742 742
743 743 import site
744 744 sys.path.insert(0, virtual_env)
745 745 site.addsitedir(virtual_env)
746 746
747 747 #-------------------------------------------------------------------------
748 748 # Things related to injections into the sys module
749 749 #-------------------------------------------------------------------------
750 750
751 751 def save_sys_module_state(self):
752 752 """Save the state of hooks in the sys module.
753 753
754 754 This has to be called after self.user_module is created.
755 755 """
756 756 self._orig_sys_module_state = {}
757 757 self._orig_sys_module_state['stdin'] = sys.stdin
758 758 self._orig_sys_module_state['stdout'] = sys.stdout
759 759 self._orig_sys_module_state['stderr'] = sys.stderr
760 760 self._orig_sys_module_state['excepthook'] = sys.excepthook
761 761 self._orig_sys_modules_main_name = self.user_module.__name__
762 762 self._orig_sys_modules_main_mod = sys.modules.get(self.user_module.__name__)
763 763
764 764 def restore_sys_module_state(self):
765 765 """Restore the state of the sys module."""
766 766 try:
767 767 for k, v in iteritems(self._orig_sys_module_state):
768 768 setattr(sys, k, v)
769 769 except AttributeError:
770 770 pass
771 771 # Reset what was done in self.init_sys_modules
772 772 if self._orig_sys_modules_main_mod is not None:
773 773 sys.modules[self._orig_sys_modules_main_name] = self._orig_sys_modules_main_mod
774 774
775 775 #-------------------------------------------------------------------------
776 776 # Things related to hooks
777 777 #-------------------------------------------------------------------------
778 778
779 779 def init_hooks(self):
780 780 # hooks holds pointers used for user-side customizations
781 781 self.hooks = Struct()
782 782
783 783 self.strdispatchers = {}
784 784
785 785 # Set all default hooks, defined in the IPython.hooks module.
786 786 hooks = IPython.core.hooks
787 787 for hook_name in hooks.__all__:
788 788 # default hooks have priority 100, i.e. low; user hooks should have
789 789 # 0-100 priority
790 790 self.set_hook(hook_name,getattr(hooks,hook_name), 100, _warn_deprecated=False)
791 791
792 792 def set_hook(self,name,hook, priority=50, str_key=None, re_key=None,
793 793 _warn_deprecated=True):
794 794 """set_hook(name,hook) -> sets an internal IPython hook.
795 795
796 796 IPython exposes some of its internal API as user-modifiable hooks. By
797 797 adding your function to one of these hooks, you can modify IPython's
798 798 behavior to call your own routines at runtime.
799 799
800 800 # At some point in the future, this should validate the hook before it
801 801 # accepts it. Probably at least check that the hook takes the number
802 802 # of args it's supposed to.
803 803
804 804 f = types.MethodType(hook,self)
805 805
806 806 # check if the hook is for strdispatcher first
807 807 if str_key is not None:
808 808 sdp = self.strdispatchers.get(name, StrDispatch())
809 809 sdp.add_s(str_key, f, priority )
810 810 self.strdispatchers[name] = sdp
811 811 return
812 812 if re_key is not None:
813 813 sdp = self.strdispatchers.get(name, StrDispatch())
814 814 sdp.add_re(re.compile(re_key), f, priority )
815 815 self.strdispatchers[name] = sdp
816 816 return
817 817
818 818 dp = getattr(self.hooks, name, None)
819 819 if name not in IPython.core.hooks.__all__:
820 820 print("Warning! Hook '%s' is not one of %s" % \
821 821 (name, IPython.core.hooks.__all__ ))
822 822
823 823 if _warn_deprecated and (name in IPython.core.hooks.deprecated):
824 824 alternative = IPython.core.hooks.deprecated[name]
825 825 warn("Hook {} is deprecated. Use {} instead.".format(name, alternative))
826 826
827 827 if not dp:
828 828 dp = IPython.core.hooks.CommandChainDispatcher()
829 829
830 830 try:
831 831 dp.add(f,priority)
832 832 except AttributeError:
833 833 # it was not commandchain, plain old func - replace
834 834 dp = f
835 835
836 836 setattr(self.hooks,name, dp)
837 837
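A hedged usage sketch of set_hook(): 'editor' is one of the hook names exposed in IPython.core.hooks.__all__, but the editor command and the exact keyword arguments below are assumptions for illustration only.

    import subprocess

    def vim_editor(self, filename, linenum=None, wait=True):
        # open the requested file in vim at the given line (illustrative only)
        subprocess.call(['vim', '+%d' % (linenum or 1), filename])

    ip = get_ipython()
    ip.set_hook('editor', vim_editor)   # user hooks default to priority 50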
838 838 #-------------------------------------------------------------------------
839 839 # Things related to events
840 840 #-------------------------------------------------------------------------
841 841
842 842 def init_events(self):
843 843 self.events = EventManager(self, available_events)
844 844
845 845 def register_post_execute(self, func):
846 846 """DEPRECATED: Use ip.events.register('post_run_cell', func)
847 847
848 848 Register a function for calling after code execution.
849 849 """
850 850 warn("ip.register_post_execute is deprecated, use "
851 851 "ip.events.register('post_run_cell', func) instead.")
852 852 self.events.register('post_run_cell', func)
853 853
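Since register_post_execute() is deprecated, here is a minimal sketch of the replacement it points to (assuming post_run_cell callbacks take no arguments in this version):

    ip = get_ipython()

    def announce_done():
        print("cell finished")

    ip.events.register('post_run_cell', announce_done)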
854 854 #-------------------------------------------------------------------------
855 855 # Things related to the "main" module
856 856 #-------------------------------------------------------------------------
857 857
858 858 def new_main_mod(self, filename, modname):
859 859 """Return a new 'main' module object for user code execution.
860 860
861 861 ``filename`` should be the path of the script which will be run in the
862 862 module. Requests with the same filename will get the same module, with
863 863 its namespace cleared.
864 864
865 865 ``modname`` should be the module name - normally either '__main__' or
866 866 the basename of the file without the extension.
867 867
868 868 When scripts are executed via %run, we must keep a reference to their
869 869 __main__ module around so that Python doesn't
870 870 clear it, rendering references to module globals useless.
871 871
872 872 This method keeps said reference in a private dict, keyed by the
873 873 absolute path of the script. This way, for multiple executions of the
874 874 same script we only keep one copy of the namespace (the last one),
875 875 thus preventing memory leaks from old references while allowing the
876 876 objects from the last execution to be accessible.
877 877 """
878 878 filename = os.path.abspath(filename)
879 879 try:
880 880 main_mod = self._main_mod_cache[filename]
881 881 except KeyError:
882 882 main_mod = self._main_mod_cache[filename] = types.ModuleType(modname,
883 883 doc="Module created for script run in IPython")
884 884 else:
885 885 main_mod.__dict__.clear()
886 886 main_mod.__name__ = modname
887 887
888 888 main_mod.__file__ = filename
889 889 # It seems pydoc (and perhaps others) needs any module instance to
890 890 # implement a __nonzero__ method
891 891 main_mod.__nonzero__ = lambda : True
892 892
893 893 return main_mod
894 894
895 895 def clear_main_mod_cache(self):
896 896 """Clear the cache of main modules.
897 897
898 898 Mainly for use by utilities like %reset.
899 899
900 900 Examples
901 901 --------
902 902
903 903 In [15]: import IPython
904 904
905 905 In [16]: m = _ip.new_main_mod(IPython.__file__, 'IPython')
906 906
907 907 In [17]: len(_ip._main_mod_cache) > 0
908 908 Out[17]: True
909 909
910 910 In [18]: _ip.clear_main_mod_cache()
911 911
912 912 In [19]: len(_ip._main_mod_cache) == 0
913 913 Out[19]: True
914 914 """
915 915 self._main_mod_cache.clear()
916 916
917 917 #-------------------------------------------------------------------------
918 918 # Things related to debugging
919 919 #-------------------------------------------------------------------------
920 920
921 921 def init_pdb(self):
922 922 # Set calling of pdb on exceptions
923 923 # self.call_pdb is a property
924 924 self.call_pdb = self.pdb
925 925
926 926 def _get_call_pdb(self):
927 927 return self._call_pdb
928 928
929 929 def _set_call_pdb(self,val):
930 930
931 931 if val not in (0,1,False,True):
932 932 raise ValueError('new call_pdb value must be boolean')
933 933
934 934 # store value in instance
935 935 self._call_pdb = val
936 936
937 937 # notify the actual exception handlers
938 938 self.InteractiveTB.call_pdb = val
939 939
940 940 call_pdb = property(_get_call_pdb,_set_call_pdb,None,
941 941 'Control auto-activation of pdb at exceptions')
942 942
943 943 def debugger(self,force=False):
944 944 """Call the pydb/pdb debugger.
945 945
946 946 Keywords:
947 947
948 948 - force(False): by default, this routine checks the instance call_pdb
949 949 flag and does not actually invoke the debugger if the flag is false.
950 950 The 'force' option forces the debugger to activate even if the flag
951 951 is false.
952 952 """
953 953
954 954 if not (force or self.call_pdb):
955 955 return
956 956
957 957 if not hasattr(sys,'last_traceback'):
958 958 error('No traceback has been produced, nothing to debug.')
959 959 return
960 960
961 961 # use pydb if available
962 962 if debugger.has_pydb:
963 963 from pydb import pm
964 964 else:
965 965 # fallback to our internal debugger
966 966 pm = lambda : self.InteractiveTB.debugger(force=True)
967 967
968 968 with self.readline_no_record:
969 969 pm()
970 970
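A hedged one-liner showing the force flag described in the debugger() docstring; it is only useful after an exception has set sys.last_traceback:

    # Start a post-mortem session on the last exception even if call_pdb is off.
    get_ipython().debugger(force=True)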
971 971 #-------------------------------------------------------------------------
972 972 # Things related to IPython's various namespaces
973 973 #-------------------------------------------------------------------------
974 974 default_user_namespaces = True
975 975
976 976 def init_create_namespaces(self, user_module=None, user_ns=None):
977 977 # Create the namespace where the user will operate. user_ns is
978 978 # normally the only one used, and it is passed to the exec calls as
979 979 # the locals argument. But we do carry a user_global_ns namespace
980 980 # given as the exec 'globals' argument. This is useful in embedding
981 981 # situations where the ipython shell opens in a context where the
982 982 # distinction between locals and globals is meaningful. For
983 983 # non-embedded contexts, it is just the same object as the user_ns dict.
984 984
985 985 # FIXME. For some strange reason, __builtins__ is showing up at user
986 986 # level as a dict instead of a module. This is a manual fix, but I
987 987 # should really track down where the problem is coming from. Alex
988 988 # Schmolck reported this problem first.
989 989
990 990 # A useful post by Alex Martelli on this topic:
991 991 # Re: inconsistent value from __builtins__
992 992 # Von: Alex Martelli <aleaxit@yahoo.com>
993 993 # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends
994 994 # Gruppen: comp.lang.python
995 995
996 996 # Michael Hohn <hohn@hooknose.lbl.gov> wrote:
997 997 # > >>> print type(builtin_check.get_global_binding('__builtins__'))
998 998 # > <type 'dict'>
999 999 # > >>> print type(__builtins__)
1000 1000 # > <type 'module'>
1001 1001 # > Is this difference in return value intentional?
1002 1002
1003 1003 # Well, it's documented that '__builtins__' can be either a dictionary
1004 1004 # or a module, and it's been that way for a long time. Whether it's
1005 1005 # intentional (or sensible), I don't know. In any case, the idea is
1006 1006 # that if you need to access the built-in namespace directly, you
1007 1007 # should start with "import __builtin__" (note, no 's') which will
1008 1008 # definitely give you a module. Yeah, it's somewhat confusing:-(.
1009 1009
1010 1010 # These routines return a properly built module and dict as needed by
1011 1011 # the rest of the code, and can also be used by extension writers to
1012 1012 # generate properly initialized namespaces.
1013 1013 if (user_ns is not None) or (user_module is not None):
1014 1014 self.default_user_namespaces = False
1015 1015 self.user_module, self.user_ns = self.prepare_user_module(user_module, user_ns)
1016 1016
1017 1017 # A record of hidden variables we have added to the user namespace, so
1018 1018 # we can list later only variables defined in actual interactive use.
1019 1019 self.user_ns_hidden = {}
1020 1020
1021 1021 # Now that FakeModule produces a real module, we've run into a nasty
1022 1022 # problem: after script execution (via %run), the module where the user
1023 1023 # code ran is deleted. Now that this object is a true module (needed
1024 1024 # so doctest and other tools work correctly), the Python module
1025 1025 # teardown mechanism runs over it, and sets to None every variable
1026 1026 # present in that module. Top-level references to objects from the
1027 1027 # script survive, because the user_ns is updated with them. However,
1028 1028 # calling functions defined in the script that use other things from
1029 1029 # the script will fail, because the function's closure had references
1030 1030 # to the original objects, which are now all None. So we must protect
1031 1031 # these modules from deletion by keeping a cache.
1032 1032 #
1033 1033 # To avoid keeping stale modules around (we only need the one from the
1034 1034 # last run), we use a dict keyed with the full path to the script, so
1035 1035 # only the last version of the module is held in the cache. Note,
1036 1036 # however, that we must cache the module *namespace contents* (their
1037 1037 # __dict__), because if we try to cache the actual modules, old ones
1038 1038 # (uncached) could be destroyed while still holding references (such as
1039 1039 # those held by GUI objects that tend to be long-lived).
1040 1040 #
1041 1041 # The %reset command will flush this cache. See the cache_main_mod()
1042 1042 # and clear_main_mod_cache() methods for details on use.
1043 1043
1044 1044 # This is the cache used for 'main' namespaces
1045 1045 self._main_mod_cache = {}
1046 1046
1047 1047 # A table holding all the namespaces IPython deals with, so that
1048 1048 # introspection facilities can search easily.
1049 1049 self.ns_table = {'user_global':self.user_module.__dict__,
1050 1050 'user_local':self.user_ns,
1051 1051 'builtin':builtin_mod.__dict__
1052 1052 }
1053 1053
1054 1054 @property
1055 1055 def user_global_ns(self):
1056 1056 return self.user_module.__dict__
1057 1057
1058 1058 def prepare_user_module(self, user_module=None, user_ns=None):
1059 1059 """Prepare the module and namespace in which user code will be run.
1060 1060
1061 1061 When IPython is started normally, both parameters are None: a new module
1062 1062 is created automatically, and its __dict__ used as the namespace.
1063 1063
1064 1064 If only user_module is provided, its __dict__ is used as the namespace.
1065 1065 If only user_ns is provided, a dummy module is created, and user_ns
1066 1066 becomes the global namespace. If both are provided (as they may be
1067 1067 when embedding), user_ns is the local namespace, and user_module
1068 1068 provides the global namespace.
1069 1069
1070 1070 Parameters
1071 1071 ----------
1072 1072 user_module : module, optional
1073 1073 The current user module in which IPython is being run. If None,
1074 1074 a clean module will be created.
1075 1075 user_ns : dict, optional
1076 1076 A namespace in which to run interactive commands.
1077 1077
1078 1078 Returns
1079 1079 -------
1080 1080 A tuple of user_module and user_ns, each properly initialised.
1081 1081 """
1082 1082 if user_module is None and user_ns is not None:
1083 1083 user_ns.setdefault("__name__", "__main__")
1084 1084 user_module = DummyMod()
1085 1085 user_module.__dict__ = user_ns
1086 1086
1087 1087 if user_module is None:
1088 1088 user_module = types.ModuleType("__main__",
1089 1089 doc="Automatically created module for IPython interactive environment")
1090 1090
1091 1091 # We must ensure that __builtin__ (without the final 's') is always
1092 1092 # available and pointing to the __builtin__ *module*. For more details:
1093 1093 # http://mail.python.org/pipermail/python-dev/2001-April/014068.html
1094 1094 user_module.__dict__.setdefault('__builtin__', builtin_mod)
1095 1095 user_module.__dict__.setdefault('__builtins__', builtin_mod)
1096 1096
1097 1097 if user_ns is None:
1098 1098 user_ns = user_module.__dict__
1099 1099
1100 1100 return user_module, user_ns
1101 1101
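A small sketch of prepare_user_module() called with only a namespace dict, as described in the docstring above (the namespace contents are made up):

    ip = get_ipython()
    ns = {'answer': 42}
    module, ns = ip.prepare_user_module(user_ns=ns)
    assert module.__dict__ is ns          # the dummy module wraps the given dict
    assert '__builtin__' in ns            # builtin references are filled in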
1102 1102 def init_sys_modules(self):
1103 1103 # We need to insert into sys.modules something that looks like a
1104 1104 # module but which accesses the IPython namespace, for shelve and
1105 1105 # pickle to work interactively. Normally they rely on getting
1106 1106 # everything out of __main__, but for embedding purposes each IPython
1107 1107 # instance has its own private namespace, so we can't go shoving
1108 1108 # everything into __main__.
1109 1109
1110 1110 # note, however, that we should only do this for non-embedded
1111 1111 # ipythons, which really mimic the __main__.__dict__ with their own
1112 1112 # namespace. Embedded instances, on the other hand, should not do
1113 1113 # this because they need to manage the user local/global namespaces
1114 1114 # only, but they live within a 'normal' __main__ (meaning, they
1115 1115 # shouldn't overtake the execution environment of the script they're
1116 1116 # embedded in).
1117 1117
1118 1118 # This is overridden in the InteractiveShellEmbed subclass to a no-op.
1119 1119 main_name = self.user_module.__name__
1120 1120 sys.modules[main_name] = self.user_module
1121 1121
1122 1122 def init_user_ns(self):
1123 1123 """Initialize all user-visible namespaces to their minimum defaults.
1124 1124
1125 1125 Certain history lists are also initialized here, as they effectively
1126 1126 act as user namespaces.
1127 1127
1128 1128 Notes
1129 1129 -----
1130 1130 All data structures here are only filled in; they are NOT reset by this
1131 1131 method. If they were not empty before, data will simply be added to
1132 1132 them.
1133 1133 """
1134 1134 # This function works in two parts: first we put a few things in
1135 1135 # user_ns, and we sync those contents into user_ns_hidden so that these
1136 1136 # initial variables aren't shown by %who. After the sync, we add the
1137 1137 # rest of what we *do* want the user to see with %who even on a new
1138 1138 # session (probably nothing, so they really only see their own stuff).
1139 1139
1140 1140 # The user dict must *always* have a __builtin__ reference to the
1141 1141 # Python standard __builtin__ namespace, which must be imported.
1142 1142 # This is so that certain operations in prompt evaluation can be
1143 1143 # reliably executed with builtins. Note that we can NOT use
1144 1144 # __builtins__ (note the 's'), because that can either be a dict or a
1145 1145 # module, and can even mutate at runtime, depending on the context
1146 1146 # (Python makes no guarantees on it). In contrast, __builtin__ is
1147 1147 # always a module object, though it must be explicitly imported.
1148 1148
1149 1149 # For more details:
1150 1150 # http://mail.python.org/pipermail/python-dev/2001-April/014068.html
1151 1151 ns = dict()
1152 1152
1153 1153 # make global variables for user access to the histories
1154 1154 ns['_ih'] = self.history_manager.input_hist_parsed
1155 1155 ns['_oh'] = self.history_manager.output_hist
1156 1156 ns['_dh'] = self.history_manager.dir_hist
1157 1157
1158 1158 ns['_sh'] = shadowns
1159 1159
1160 1160 # user aliases to input and output histories. These shouldn't show up
1161 1161 # in %who, as they can have very large reprs.
1162 1162 ns['In'] = self.history_manager.input_hist_parsed
1163 1163 ns['Out'] = self.history_manager.output_hist
1164 1164
1165 1165 # Store myself as the public api!!!
1166 1166 ns['get_ipython'] = self.get_ipython
1167 1167
1168 1168 ns['exit'] = self.exiter
1169 1169 ns['quit'] = self.exiter
1170 1170
1171 1171 # Sync what we've added so far to user_ns_hidden so these aren't seen
1172 1172 # by %who
1173 1173 self.user_ns_hidden.update(ns)
1174 1174
1175 1175 # Anything put into ns now would show up in %who. Think twice before
1176 1176 # putting anything here, as we really want %who to show the user their
1177 1177 # stuff, not our variables.
1178 1178
1179 1179 # Finally, update the real user's namespace
1180 1180 self.user_ns.update(ns)
1181 1181
1182 1182 @property
1183 1183 def all_ns_refs(self):
1184 1184 """Get a list of references to all the namespace dictionaries in which
1185 1185 IPython might store a user-created object.
1186 1186
1187 1187 Note that this does not include the displayhook, which also caches
1188 1188 objects from the output."""
1189 1189 return [self.user_ns, self.user_global_ns, self.user_ns_hidden] + \
1190 1190 [m.__dict__ for m in self._main_mod_cache.values()]
1191 1191
1192 1192 def reset(self, new_session=True):
1193 1193 """Clear all internal namespaces, and attempt to release references to
1194 1194 user objects.
1195 1195
1196 1196 If new_session is True, a new history session will be opened.
1197 1197 """
1198 1198 # Clear histories
1199 1199 self.history_manager.reset(new_session)
1200 1200 # Reset counter used to index all histories
1201 1201 if new_session:
1202 1202 self.execution_count = 1
1203 1203
1204 1204 # Flush cached output items
1205 1205 if self.displayhook.do_full_cache:
1206 1206 self.displayhook.flush()
1207 1207
1208 1208 # The main execution namespaces must be cleared very carefully,
1209 1209 # skipping the deletion of the builtin-related keys, because doing so
1210 1210 # would cause errors in many objects' __del__ methods.
1211 1211 if self.user_ns is not self.user_global_ns:
1212 1212 self.user_ns.clear()
1213 1213 ns = self.user_global_ns
1214 1214 drop_keys = set(ns.keys())
1215 1215 drop_keys.discard('__builtin__')
1216 1216 drop_keys.discard('__builtins__')
1217 1217 drop_keys.discard('__name__')
1218 1218 for k in drop_keys:
1219 1219 del ns[k]
1220 1220
1221 1221 self.user_ns_hidden.clear()
1222 1222
1223 1223 # Restore the user namespaces to minimal usability
1224 1224 self.init_user_ns()
1225 1225
1226 1226 # Restore the default and user aliases
1227 1227 self.alias_manager.clear_aliases()
1228 1228 self.alias_manager.init_aliases()
1229 1229
1230 1230 # Flush the private list of module references kept for script
1231 1231 # execution protection
1232 1232 self.clear_main_mod_cache()
1233 1233
1234 1234 def del_var(self, varname, by_name=False):
1235 1235 """Delete a variable from the various namespaces, so that, as
1236 1236 far as possible, we're not keeping any hidden references to it.
1237 1237
1238 1238 Parameters
1239 1239 ----------
1240 1240 varname : str
1241 1241 The name of the variable to delete.
1242 1242 by_name : bool
1243 1243 If True, delete variables with the given name in each
1244 1244 namespace. If False (default), find the variable in the user
1245 1245 namespace, and delete references to it.
1246 1246 """
1247 1247 if varname in ('__builtin__', '__builtins__'):
1248 1248 raise ValueError("Refusing to delete %s" % varname)
1249 1249
1250 1250 ns_refs = self.all_ns_refs
1251 1251
1252 1252 if by_name: # Delete by name
1253 1253 for ns in ns_refs:
1254 1254 try:
1255 1255 del ns[varname]
1256 1256 except KeyError:
1257 1257 pass
1258 1258 else: # Delete by object
1259 1259 try:
1260 1260 obj = self.user_ns[varname]
1261 1261 except KeyError:
1262 1262 raise NameError("name '%s' is not defined" % varname)
1263 1263 # Also check in output history
1264 1264 ns_refs.append(self.history_manager.output_hist)
1265 1265 for ns in ns_refs:
1266 1266 to_delete = [n for n, o in iteritems(ns) if o is obj]
1267 1267 for name in to_delete:
1268 1268 del ns[name]
1269 1269
1270 1270 # displayhook keeps extra references, but not in a dictionary
1271 1271 for name in ('_', '__', '___'):
1272 1272 if getattr(self.displayhook, name) is obj:
1273 1273 setattr(self.displayhook, name, None)
1274 1274
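A hedged sketch of del_var() deleting by object, as described above (variable names are illustrative):

    ip = get_ipython()
    ip.user_ns['data'] = [1, 2, 3]
    ip.user_ns['alias'] = ip.user_ns['data']
    ip.del_var('data')    # removes 'data' and 'alias', which refer to the same list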
1275 1275 def reset_selective(self, regex=None):
1276 1276 """Clear selective variables from internal namespaces based on a
1277 1277 specified regular expression.
1278 1278
1279 1279 Parameters
1280 1280 ----------
1281 1281 regex : string or compiled pattern, optional
1282 1282 A regular expression pattern that will be used in searching
1283 1283 variable names in the user's namespaces.
1284 1284 """
1285 1285 if regex is not None:
1286 1286 try:
1287 1287 m = re.compile(regex)
1288 1288 except TypeError:
1289 1289 raise TypeError('regex must be a string or compiled pattern')
1290 1290 # Search for keys in each namespace that match the given regex
1291 1291 # If a match is found, delete the key/value pair.
1292 1292 for ns in self.all_ns_refs:
1293 1293 for var in list(ns):  # copy the keys, since we delete entries while iterating
1294 1294 if m.search(var):
1295 1295 del ns[var]
1296 1296
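A hedged sketch of reset_selective() (the names are made up):

    ip = get_ipython()
    ip.user_ns.update({'tmp_a': 1, 'tmp_b': 2, 'keep_me': 3})
    ip.reset_selective(r'^tmp_')
    assert 'keep_me' in ip.user_ns and 'tmp_a' not in ip.user_ns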
1297 1297 def push(self, variables, interactive=True):
1298 1298 """Inject a group of variables into the IPython user namespace.
1299 1299
1300 1300 Parameters
1301 1301 ----------
1302 1302 variables : dict, str or list/tuple of str
1303 1303 The variables to inject into the user's namespace. If a dict, a
1304 1304 simple update is done. If a str, the string is assumed to have
1305 1305 variable names separated by spaces. A list/tuple of str can also
1306 1306 be used to give the variable names. If just the variable names are
1307 1307 given (list/tuple/str), then the variable values are looked up in the
1308 1308 caller's frame.
1309 1309 interactive : bool
1310 1310 If True (default), the variables will be listed with the ``who``
1311 1311 magic.
1312 1312 """
1313 1313 vdict = None
1314 1314
1315 1315 # We need a dict of name/value pairs to do namespace updates.
1316 1316 if isinstance(variables, dict):
1317 1317 vdict = variables
1318 1318 elif isinstance(variables, string_types+(list, tuple)):
1319 1319 if isinstance(variables, string_types):
1320 1320 vlist = variables.split()
1321 1321 else:
1322 1322 vlist = variables
1323 1323 vdict = {}
1324 1324 cf = sys._getframe(1)
1325 1325 for name in vlist:
1326 1326 try:
1327 1327 vdict[name] = eval(name, cf.f_globals, cf.f_locals)
1328 1328 except:
1329 1329 print('Could not get variable %s from %s' %
1330 1330 (name,cf.f_code.co_name))
1331 1331 else:
1332 1332 raise ValueError('variables must be a dict/str/list/tuple')
1333 1333
1334 1334 # Propagate variables to user namespace
1335 1335 self.user_ns.update(vdict)
1336 1336
1337 1337 # And configure interactive visibility
1338 1338 user_ns_hidden = self.user_ns_hidden
1339 1339 if interactive:
1340 1340 for name in vdict:
1341 1341 user_ns_hidden.pop(name, None)
1342 1342 else:
1343 1343 user_ns_hidden.update(vdict)
1344 1344
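A hedged usage sketch of push() (variable names and values are illustrative; the string form looks names up in the caller's frame):

    ip = get_ipython()
    alpha, beta = 1.5, 2.5
    ip.push('alpha beta')                        # values found in this frame
    ip.push({'gamma': 3.5}, interactive=False)   # injected, but hidden from %who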
1345 1345 def drop_by_id(self, variables):
1346 1346 """Remove a dict of variables from the user namespace, if they are the
1347 1347 same as the values in the dictionary.
1348 1348
1349 1349 This is intended for use by extensions: variables that they've added can
1350 1350 be taken back out if they are unloaded, without removing any that the
1351 1351 user has overwritten.
1352 1352
1353 1353 Parameters
1354 1354 ----------
1355 1355 variables : dict
1356 1356 A dictionary mapping object names (as strings) to the objects.
1357 1357 """
1358 1358 for name, obj in iteritems(variables):
1359 1359 if name in self.user_ns and self.user_ns[name] is obj:
1360 1360 del self.user_ns[name]
1361 1361 self.user_ns_hidden.pop(name, None)
1362 1362
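# --- Editor's example: illustrative sketch, not part of the original diff ----
# drop_by_id() removes a name only while user_ns still holds the *same*
# object, so an extension can clean up without clobbering user rebindings.
# `ext_helper` is a hypothetical extension-provided name.
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()
added = {'ext_helper': object()}
ip.push(added)
ip.user_ns['ext_helper'] = 'user value'    # the user rebinds the name...
ip.drop_by_id(added)                       # ...so this leaves it untouched
assert ip.user_ns['ext_helper'] == 'user value'
# ------------------------------------------------------------------------------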
1363 1363 #-------------------------------------------------------------------------
1364 1364 # Things related to object introspection
1365 1365 #-------------------------------------------------------------------------
1366 1366
1367 1367 def _ofind(self, oname, namespaces=None):
1368 1368 """Find an object in the available namespaces.
1369 1369
1370 1370 self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic
1371 1371
1372 1372 Has special code to detect magic functions.
1373 1373 """
1374 1374 oname = oname.strip()
1375 1375 #print '1- oname: <%r>' % oname # dbg
1376 1376 if not oname.startswith(ESC_MAGIC) and \
1377 1377 not oname.startswith(ESC_MAGIC2) and \
1378 1378 not py3compat.isidentifier(oname, dotted=True):
1379 1379 return dict(found=False)
1380 1380
1381 1381 alias_ns = None
1382 1382 if namespaces is None:
1383 1383 # Namespaces to search in:
1384 1384 # Put them in a list. The order is important so that we
1385 1385 # find things in the same order that Python finds them.
1386 1386 namespaces = [ ('Interactive', self.user_ns),
1387 1387 ('Interactive (global)', self.user_global_ns),
1388 1388 ('Python builtin', builtin_mod.__dict__),
1389 1389 ]
1390 1390
1391 1391 # initialize results to 'null'
1392 1392 found = False; obj = None; ospace = None; ds = None;
1393 1393 ismagic = False; isalias = False; parent = None
1394 1394
1395 1395 # We need to special-case 'print', which as of python2.6 registers as a
1396 1396 # function but should only be treated as one if print_function was
1397 1397 # loaded with a future import. In this case, just bail.
1398 1398 if (oname == 'print' and not py3compat.PY3 and not \
1399 1399 (self.compile.compiler_flags & __future__.CO_FUTURE_PRINT_FUNCTION)):
1400 1400 return {'found':found, 'obj':obj, 'namespace':ospace,
1401 1401 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
1402 1402
1403 1403 # Look for the given name by splitting it in parts. If the head is
1404 1404 # found, then we look for all the remaining parts as members, and only
1405 1405 # declare success if we can find them all.
1406 1406 oname_parts = oname.split('.')
1407 1407 oname_head, oname_rest = oname_parts[0],oname_parts[1:]
1408 1408 for nsname,ns in namespaces:
1409 1409 try:
1410 1410 obj = ns[oname_head]
1411 1411 except KeyError:
1412 1412 continue
1413 1413 else:
1414 1414 #print 'oname_rest:', oname_rest # dbg
1415 1415 for part in oname_rest:
1416 1416 try:
1417 1417 parent = obj
1418 1418 obj = getattr(obj,part)
1419 1419 except:
1420 1420 # Blanket except b/c some badly implemented objects
1421 1421 # allow __getattr__ to raise exceptions other than
1422 1422 # AttributeError, which then crashes IPython.
1423 1423 break
1424 1424 else:
1425 1425 # If we finish the for loop (no break), we got all members
1426 1426 found = True
1427 1427 ospace = nsname
1428 1428 break # namespace loop
1429 1429
1430 1430 # Try to see if it's magic
1431 1431 if not found:
1432 1432 obj = None
1433 1433 if oname.startswith(ESC_MAGIC2):
1434 1434 oname = oname.lstrip(ESC_MAGIC2)
1435 1435 obj = self.find_cell_magic(oname)
1436 1436 elif oname.startswith(ESC_MAGIC):
1437 1437 oname = oname.lstrip(ESC_MAGIC)
1438 1438 obj = self.find_line_magic(oname)
1439 1439 else:
1440 1440 # search without prefix, so run? will find %run?
1441 1441 obj = self.find_line_magic(oname)
1442 1442 if obj is None:
1443 1443 obj = self.find_cell_magic(oname)
1444 1444 if obj is not None:
1445 1445 found = True
1446 1446 ospace = 'IPython internal'
1447 1447 ismagic = True
1448 1448
1449 1449 # Last try: special-case some literals like '', [], {}, etc:
1450 1450 if not found and oname_head in ["''",'""','[]','{}','()']:
1451 1451 obj = eval(oname_head)
1452 1452 found = True
1453 1453 ospace = 'Interactive'
1454 1454
1455 1455 return {'found':found, 'obj':obj, 'namespace':ospace,
1456 1456 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
1457 1457
1458 1458 def _ofind_property(self, oname, info):
1459 1459 """Second part of object finding, to look for property details."""
1460 1460 if info.found:
1461 1461 # Get the docstring of the class property if it exists.
1462 1462 path = oname.split('.')
1463 1463 root = '.'.join(path[:-1])
1464 1464 if info.parent is not None:
1465 1465 try:
1466 1466 target = getattr(info.parent, '__class__')
1467 1467 # The object belongs to a class instance.
1468 1468 try:
1469 1469 target = getattr(target, path[-1])
1470 1470 # The class defines the object.
1471 1471 if isinstance(target, property):
1472 1472 oname = root + '.__class__.' + path[-1]
1473 1473 info = Struct(self._ofind(oname))
1474 1474 except AttributeError: pass
1475 1475 except AttributeError: pass
1476 1476
1477 1477 # We return either the new info or the unmodified input if the object
1478 1478 # hadn't been found
1479 1479 return info
1480 1480
1481 1481 def _object_find(self, oname, namespaces=None):
1482 1482 """Find an object and return a struct with info about it."""
1483 1483 inf = Struct(self._ofind(oname, namespaces))
1484 1484 return Struct(self._ofind_property(oname, inf))
1485 1485
1486 1486 def _inspect(self, meth, oname, namespaces=None, **kw):
1487 1487 """Generic interface to the inspector system.
1488 1488
1489 1489 This function is meant to be called by pdef, pdoc & friends."""
1490 1490 info = self._object_find(oname, namespaces)
1491 1491 if info.found:
1492 1492 pmethod = getattr(self.inspector, meth)
1493 1493 formatter = format_screen if info.ismagic else None
1494 1494 if meth == 'pdoc':
1495 1495 pmethod(info.obj, oname, formatter)
1496 1496 elif meth == 'pinfo':
1497 1497 pmethod(info.obj, oname, formatter, info, **kw)
1498 1498 else:
1499 1499 pmethod(info.obj, oname)
1500 1500 else:
1501 1501 print('Object `%s` not found.' % oname)
1502 1502 return 'not found' # so callers can take other action
1503 1503
1504 1504 def object_inspect(self, oname, detail_level=0):
1505 1505 with self.builtin_trap:
1506 1506 info = self._object_find(oname)
1507 1507 if info.found:
1508 1508 return self.inspector.info(info.obj, oname, info=info,
1509 1509 detail_level=detail_level
1510 1510 )
1511 1511 else:
1512 1512 return oinspect.object_info(name=oname, found=False)
1513 1513
1514 1514 #-------------------------------------------------------------------------
1515 1515 # Things related to history management
1516 1516 #-------------------------------------------------------------------------
1517 1517
1518 1518 def init_history(self):
1519 1519 """Sets up the command history, and starts regular autosaves."""
1520 1520 self.history_manager = HistoryManager(shell=self, parent=self)
1521 1521 self.configurables.append(self.history_manager)
1522 1522
1523 1523 #-------------------------------------------------------------------------
1524 1524 # Things related to exception handling and tracebacks (not debugging)
1525 1525 #-------------------------------------------------------------------------
1526 1526
1527 1527 def init_traceback_handlers(self, custom_exceptions):
1528 1528 # Syntax error handler.
1529 1529 self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor')
1530 1530
1531 1531 # The interactive one is initialized with an offset, meaning we always
1532 1532 # want to remove the topmost item in the traceback, which is our own
1533 1533 # internal code. Valid modes: ['Plain','Context','Verbose']
1534 1534 self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain',
1535 1535 color_scheme='NoColor',
1536 1536 tb_offset = 1,
1537 1537 check_cache=check_linecache_ipython)
1538 1538
1539 1539 # The instance will store a pointer to the system-wide exception hook,
1540 1540 # so that runtime code (such as magics) can access it. This is because
1541 1541 # during the read-eval loop, it may get temporarily overwritten.
1542 1542 self.sys_excepthook = sys.excepthook
1543 1543
1544 1544 # and add any custom exception handlers the user may have specified
1545 1545 self.set_custom_exc(*custom_exceptions)
1546 1546
1547 1547 # Set the exception mode
1548 1548 self.InteractiveTB.set_mode(mode=self.xmode)
1549 1549
1550 1550 def set_custom_exc(self, exc_tuple, handler):
1551 1551 """set_custom_exc(exc_tuple,handler)
1552 1552
1553 1553 Set a custom exception handler, which will be called if any of the
1554 1554 exceptions in exc_tuple occur in the mainloop (specifically, in the
1555 1555 run_code() method).
1556 1556
1557 1557 Parameters
1558 1558 ----------
1559 1559
1560 1560 exc_tuple : tuple of exception classes
1561 1561 A *tuple* of exception classes, for which to call the defined
1562 1562 handler. It is very important that you use a tuple, and NOT A
1563 1563 LIST here, because of the way Python's except statement works. If
1564 1564 you only want to trap a single exception, use a singleton tuple::
1565 1565
1566 1566 exc_tuple = (MyCustomException,)
1567 1567
1568 1568 handler : callable
1569 1569 handler must have the following signature::
1570 1570
1571 1571 def my_handler(self, etype, value, tb, tb_offset=None):
1572 1572 ...
1573 1573 return structured_traceback
1574 1574
1575 1575 Your handler must return a structured traceback (a list of strings),
1576 1576 or None.
1577 1577
1578 1578 This will be made into an instance method (via types.MethodType)
1579 1579 of IPython itself, and it will be called if any of the exceptions
1580 1580 listed in the exc_tuple are caught. If the handler is None, an
1581 1581 internal basic one is used, which just prints basic info.
1582 1582
1583 1583 To protect IPython from crashes, if your handler ever raises an
1584 1584 exception or returns an invalid result, it will be immediately
1585 1585 disabled.
1586 1586
1587 1587 WARNING: by putting in your own exception handler into IPython's main
1588 1588 execution loop, you run a very good chance of nasty crashes. This
1589 1589 facility should only be used if you really know what you are doing."""
1590 1590
1591 1591 assert type(exc_tuple)==type(()) , \
1592 1592 "The custom exceptions must be given AS A TUPLE."
1593 1593
1594 1594 def dummy_handler(self,etype,value,tb,tb_offset=None):
1595 1595 print('*** Simple custom exception handler ***')
1596 1596 print('Exception type :',etype)
1597 1597 print('Exception value:',value)
1598 1598 print('Traceback :',tb)
1599 1599 #print 'Source code :','\n'.join(self.buffer)
1600 1600
1601 1601 def validate_stb(stb):
1602 1602 """validate structured traceback return type
1603 1603
1604 1604 return type of CustomTB *should* be a list of strings, but allow
1605 1605 single strings or None, which are harmless.
1606 1606
1607 1607 This function will *always* return a list of strings,
1608 1608 and will raise a TypeError if stb is inappropriate.
1609 1609 """
1610 1610 msg = "CustomTB must return list of strings, not %r" % stb
1611 1611 if stb is None:
1612 1612 return []
1613 1613 elif isinstance(stb, string_types):
1614 1614 return [stb]
1615 1615 elif not isinstance(stb, list):
1616 1616 raise TypeError(msg)
1617 1617 # it's a list
1618 1618 for line in stb:
1619 1619 # check every element
1620 1620 if not isinstance(line, string_types):
1621 1621 raise TypeError(msg)
1622 1622 return stb
1623 1623
1624 1624 if handler is None:
1625 1625 wrapped = dummy_handler
1626 1626 else:
1627 1627 def wrapped(self,etype,value,tb,tb_offset=None):
1628 1628 """wrap CustomTB handler, to protect IPython from user code
1629 1629
1630 1630 This makes it harder (but not impossible) for custom exception
1631 1631 handlers to crash IPython.
1632 1632 """
1633 1633 try:
1634 1634 stb = handler(self,etype,value,tb,tb_offset=tb_offset)
1635 1635 return validate_stb(stb)
1636 1636 except:
1637 1637 # clear custom handler immediately
1638 1638 self.set_custom_exc((), None)
1639 1639 print("Custom TB Handler failed, unregistering", file=io.stderr)
1640 1640 # show the exception in handler first
1641 1641 stb = self.InteractiveTB.structured_traceback(*sys.exc_info())
1642 1642 print(self.InteractiveTB.stb2text(stb), file=io.stdout)
1643 1643 print("The original exception:", file=io.stdout)
1644 1644 stb = self.InteractiveTB.structured_traceback(
1645 1645 (etype,value,tb), tb_offset=tb_offset
1646 1646 )
1647 1647 return stb
1648 1648
1649 1649 self.CustomTB = types.MethodType(wrapped,self)
1650 1650 self.custom_exceptions = exc_tuple
1651 1651
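# --- Editor's example: illustrative sketch, not part of the original diff ----
# A custom exception handler for a single exception type.  Per the docstring
# above, it takes (self, etype, value, tb, tb_offset=None) and returns a
# structured traceback (list of strings) or None; here it simply announces
# itself and builds the traceback with InteractiveTB.
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()

def div_handler(self, etype, value, tb, tb_offset=None):
    print('custom handler saw:', etype.__name__)
    return self.InteractiveTB.structured_traceback(
        etype, value, tb, tb_offset=tb_offset)

ip.set_custom_exc((ZeroDivisionError,), div_handler)
ip.run_cell('1/0')               # handled by div_handler, not the default path
ip.set_custom_exc((), None)      # restore the default behaviour
# ------------------------------------------------------------------------------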
1652 1652 def excepthook(self, etype, value, tb):
1653 1653 """One more defense for GUI apps that call sys.excepthook.
1654 1654
1655 1655 GUI frameworks like wxPython trap exceptions and call
1656 1656 sys.excepthook themselves. I guess this is a feature that
1657 1657 enables them to keep running after exceptions that would
1658 1658 otherwise kill their mainloop. This is a bother for IPython
1659 1659 which expects to catch all of the program exceptions with a try:
1660 1660 except: statement.
1661 1661
1662 1662 Normally, IPython sets sys.excepthook to a CrashHandler instance, so if
1663 1663 any app directly invokes sys.excepthook, it will look to the user like
1664 1664 IPython crashed. In order to work around this, we can disable the
1665 1665 CrashHandler and replace it with this excepthook instead, which prints a
1666 1666 regular traceback using our InteractiveTB. In this fashion, apps which
1667 1667 call sys.excepthook will generate a regular-looking exception from
1668 1668 IPython, and the CrashHandler will only be triggered by real IPython
1669 1669 crashes.
1670 1670
1671 1671 This hook should be used sparingly, only in places which are not likely
1672 1672 to be true IPython errors.
1673 1673 """
1674 1674 self.showtraceback((etype,value,tb),tb_offset=0)
1675 1675
1676 1676 def _get_exc_info(self, exc_tuple=None):
1677 1677 """get exc_info from a given tuple, sys.exc_info() or sys.last_type etc.
1678 1678
1679 1679 Ensures sys.last_type,value,traceback hold the exc_info we found,
1680 1680 from whichever source.
1681 1681
1682 1682 raises ValueError if none of these contain any information
1683 1683 """
1684 1684 if exc_tuple is None:
1685 1685 etype, value, tb = sys.exc_info()
1686 1686 else:
1687 1687 etype, value, tb = exc_tuple
1688 1688
1689 1689 if etype is None:
1690 1690 if hasattr(sys, 'last_type'):
1691 1691 etype, value, tb = sys.last_type, sys.last_value, \
1692 1692 sys.last_traceback
1693 1693
1694 1694 if etype is None:
1695 1695 raise ValueError("No exception to find")
1696 1696
1697 1697 # Now store the exception info in sys.last_type etc.
1698 1698 # WARNING: these variables are somewhat deprecated and not
1699 1699 # necessarily safe to use in a threaded environment, but tools
1700 1700 # like pdb depend on their existence, so let's set them. If we
1701 1701 # find problems in the field, we'll need to revisit their use.
1702 1702 sys.last_type = etype
1703 1703 sys.last_value = value
1704 1704 sys.last_traceback = tb
1705 1705
1706 1706 return etype, value, tb
1707 1707
1708 1708 def show_usage_error(self, exc):
1709 1709 """Show a short message for UsageErrors
1710 1710
1711 1711 These are special exceptions that shouldn't show a traceback.
1712 1712 """
1713 1713 self.write_err("UsageError: %s" % exc)
1714 1714
1715 1715 def showtraceback(self,exc_tuple = None,filename=None,tb_offset=None,
1716 1716 exception_only=False):
1717 1717 """Display the exception that just occurred.
1718 1718
1719 1719 If nothing is known about the exception, this is the method which
1720 1720 should be used throughout the code for presenting user tracebacks,
1721 1721 rather than directly invoking the InteractiveTB object.
1722 1722
1723 1723 A specific showsyntaxerror() also exists, but this method can take
1724 1724 care of calling it if needed, so unless you are explicitly catching a
1725 1725 SyntaxError exception, don't try to analyze the stack manually and
1726 1726 simply call this method."""
1727 1727
1728 1728 try:
1729 1729 try:
1730 1730 etype, value, tb = self._get_exc_info(exc_tuple)
1731 1731 except ValueError:
1732 1732 self.write_err('No traceback available to show.\n')
1733 1733 return
1734 1734
1735 1735 if issubclass(etype, SyntaxError):
1736 1736 # Though this won't be called by syntax errors in the input
1737 1737 # line, there may be SyntaxError cases with imported code.
1738 1738 self.showsyntaxerror(filename)
1739 1739 elif etype is UsageError:
1740 1740 self.show_usage_error(value)
1741 1741 else:
1742 1742 if exception_only:
1743 1743 stb = ['An exception has occurred, use %tb to see '
1744 1744 'the full traceback.\n']
1745 1745 stb.extend(self.InteractiveTB.get_exception_only(etype,
1746 1746 value))
1747 1747 else:
1748 1748 try:
1749 1749 # Exception classes can customise their traceback - we
1750 1750 # use this in IPython.parallel for exceptions occurring
1751 1751 # in the engines. This should return a list of strings.
1752 1752 stb = value._render_traceback_()
1753 1753 except Exception:
1754 1754 stb = self.InteractiveTB.structured_traceback(etype,
1755 1755 value, tb, tb_offset=tb_offset)
1756 1756
1757 1757 self._showtraceback(etype, value, stb)
1758 1758 if self.call_pdb:
1759 1759 # drop into debugger
1760 1760 self.debugger(force=True)
1761 1761 return
1762 1762
1763 1763 # Actually show the traceback
1764 1764 self._showtraceback(etype, value, stb)
1765 1765
1766 1766 except KeyboardInterrupt:
1767 1767 self.write_err("\nKeyboardInterrupt\n")
1768 1768
1769 1769 def _showtraceback(self, etype, evalue, stb):
1770 1770 """Actually show a traceback.
1771 1771
1772 1772 Subclasses may override this method to put the traceback on a different
1773 1773 place, like a side channel.
1774 1774 """
1775 1775 print(self.InteractiveTB.stb2text(stb), file=io.stdout)
1776 1776
1777 1777 def showsyntaxerror(self, filename=None):
1778 1778 """Display the syntax error that just occurred.
1779 1779
1780 1780 This doesn't display a stack trace because there isn't one.
1781 1781
1782 1782 If a filename is given, it is stuffed in the exception instead
1783 1783 of what was there before (because Python's parser always uses
1784 1784 "<string>" when reading from a string).
1785 1785 """
1786 1786 etype, value, last_traceback = self._get_exc_info()
1787 1787
1788 1788 if filename and issubclass(etype, SyntaxError):
1789 1789 try:
1790 1790 value.filename = filename
1791 1791 except:
1792 1792 # Not the format we expect; leave it alone
1793 1793 pass
1794 1794
1795 1795 stb = self.SyntaxTB.structured_traceback(etype, value, [])
1796 1796 self._showtraceback(etype, value, stb)
1797 1797
1798 1798 # This is overridden in TerminalInteractiveShell to show a message about
1799 1799 # the %paste magic.
1800 1800 def showindentationerror(self):
1801 1801 """Called by run_cell when there's an IndentationError in code entered
1802 1802 at the prompt.
1803 1803
1804 1804 This is overridden in TerminalInteractiveShell to show a message about
1805 1805 the %paste magic."""
1806 1806 self.showsyntaxerror()
1807 1807
1808 1808 #-------------------------------------------------------------------------
1809 1809 # Things related to readline
1810 1810 #-------------------------------------------------------------------------
1811 1811
1812 1812 def init_readline(self):
1813 1813 """Command history completion/saving/reloading."""
1814 1814
1815 1815 if self.readline_use:
1816 1816 import IPython.utils.rlineimpl as readline
1817 1817
1818 1818 self.rl_next_input = None
1819 1819 self.rl_do_indent = False
1820 1820
1821 1821 if not self.readline_use or not readline.have_readline:
1822 1822 self.has_readline = False
1823 1823 self.readline = None
1824 1824 # Set a number of methods that depend on readline to be no-op
1825 1825 self.readline_no_record = no_op_context
1826 1826 self.set_readline_completer = no_op
1827 1827 self.set_custom_completer = no_op
1828 1828 if self.readline_use:
1829 1829 warn('Readline services not available or not loaded.')
1830 1830 else:
1831 1831 self.has_readline = True
1832 1832 self.readline = readline
1833 1833 sys.modules['readline'] = readline
1834 1834
1835 1835 # Platform-specific configuration
1836 1836 if os.name == 'nt':
1837 1837 # FIXME - check with Frederick to see if we can harmonize
1838 1838 # naming conventions with pyreadline to avoid this
1839 1839 # platform-dependent check
1840 1840 self.readline_startup_hook = readline.set_pre_input_hook
1841 1841 else:
1842 1842 self.readline_startup_hook = readline.set_startup_hook
1843 1843
1844 1844 # Load user's initrc file (readline config)
1845 1845 # Or if libedit is used, load editrc.
1846 1846 inputrc_name = os.environ.get('INPUTRC')
1847 1847 if inputrc_name is None:
1848 1848 inputrc_name = '.inputrc'
1849 1849 if readline.uses_libedit:
1850 1850 inputrc_name = '.editrc'
1851 1851 inputrc_name = os.path.join(self.home_dir, inputrc_name)
1852 1852 if os.path.isfile(inputrc_name):
1853 1853 try:
1854 1854 readline.read_init_file(inputrc_name)
1855 1855 except:
1856 1856 warn('Problems reading readline initialization file <%s>'
1857 1857 % inputrc_name)
1858 1858
1859 1859 # Configure readline according to user's prefs
1860 1860 # This is only done if GNU readline is being used. If libedit
1861 1861 # is being used (as on Leopard) the readline config is
1862 1862 # not run as the syntax for libedit is different.
1863 1863 if not readline.uses_libedit:
1864 1864 for rlcommand in self.readline_parse_and_bind:
1865 1865 #print "loading rl:",rlcommand # dbg
1866 1866 readline.parse_and_bind(rlcommand)
1867 1867
1868 1868 # Remove some chars from the delimiters list. If we encounter
1869 1869 # unicode chars, discard them.
1870 1870 delims = readline.get_completer_delims()
1871 1871 if not py3compat.PY3:
1872 1872 delims = delims.encode("ascii", "ignore")
1873 1873 for d in self.readline_remove_delims:
1874 1874 delims = delims.replace(d, "")
1875 1875 delims = delims.replace(ESC_MAGIC, '')
1876 1876 readline.set_completer_delims(delims)
1877 1877 # Store these so we can restore them if something like rpy2 modifies
1878 1878 # them.
1879 1879 self.readline_delims = delims
1880 1880 # otherwise we end up with a monster history after a while:
1881 1881 readline.set_history_length(self.history_length)
1882 1882
1883 1883 self.refill_readline_hist()
1884 1884 self.readline_no_record = ReadlineNoRecord(self)
1885 1885
1886 1886 # Configure auto-indent for all platforms
1887 1887 self.set_autoindent(self.autoindent)
1888 1888
1889 1889 def refill_readline_hist(self):
1890 1890 # Load the last 1000 lines from history
1891 1891 self.readline.clear_history()
1892 1892 stdin_encoding = sys.stdin.encoding or "utf-8"
1893 1893 last_cell = u""
1894 1894 for _, _, cell in self.history_manager.get_tail(1000,
1895 1895 include_latest=True):
1896 1896 # Ignore blank lines and consecutive duplicates
1897 1897 cell = cell.rstrip()
1898 1898 if cell and (cell != last_cell):
1899 1899 try:
1900 1900 if self.multiline_history:
1901 1901 self.readline.add_history(py3compat.unicode_to_str(cell,
1902 1902 stdin_encoding))
1903 1903 else:
1904 1904 for line in cell.splitlines():
1905 1905 self.readline.add_history(py3compat.unicode_to_str(line,
1906 1906 stdin_encoding))
1907 1907 last_cell = cell
1908 1908
1909 1909 except TypeError:
1910 1910 # The history DB can get corrupted so it returns strings
1911 1911 # containing null bytes, which readline objects to.
1912 1912 continue
1913 1913
1914 1914 @skip_doctest
1915 1915 def set_next_input(self, s):
1916 1916 """ Sets the 'default' input string for the next command line.
1917 1917
1918 1918 Requires readline.
1919 1919
1920 1920 Example::
1921 1921
1922 1922 In [1]: _ip.set_next_input("Hello World")
1923 1923 In [2]: Hello World_ # cursor is here
1924 1924 """
1925 1925 self.rl_next_input = py3compat.cast_bytes_py2(s)
1926 1926
1927 1927 # Maybe move this to the terminal subclass?
1928 1928 def pre_readline(self):
1929 1929 """readline hook to be used at the start of each line.
1930 1930
1931 1931 Currently it handles auto-indent only."""
1932 1932
1933 1933 if self.rl_do_indent:
1934 1934 self.readline.insert_text(self._indent_current_str())
1935 1935 if self.rl_next_input is not None:
1936 1936 self.readline.insert_text(self.rl_next_input)
1937 1937 self.rl_next_input = None
1938 1938
1939 1939 def _indent_current_str(self):
1940 1940 """return the current level of indentation as a string"""
1941 1941 return self.input_splitter.indent_spaces * ' '
1942 1942
1943 1943 #-------------------------------------------------------------------------
1944 1944 # Things related to text completion
1945 1945 #-------------------------------------------------------------------------
1946 1946
1947 1947 def init_completer(self):
1948 1948 """Initialize the completion machinery.
1949 1949
1950 1950 This creates completion machinery that can be used by client code,
1951 1951 either interactively in-process (typically triggered by the readline
1952 1952 library), programmatically (such as in test suites) or out-of-process
1953 1953 (typically over the network by remote frontends).
1954 1954 """
1955 1955 from IPython.core.completer import IPCompleter
1956 1956 from IPython.core.completerlib import (module_completer,
1957 1957 magic_run_completer, cd_completer, reset_completer)
1958 1958
1959 1959 self.Completer = IPCompleter(shell=self,
1960 1960 namespace=self.user_ns,
1961 1961 global_namespace=self.user_global_ns,
1962 1962 use_readline=self.has_readline,
1963 1963 parent=self,
1964 1964 )
1965 1965 self.configurables.append(self.Completer)
1966 1966
1967 1967 # Add custom completers to the basic ones built into IPCompleter
1968 1968 sdisp = self.strdispatchers.get('complete_command', StrDispatch())
1969 1969 self.strdispatchers['complete_command'] = sdisp
1970 1970 self.Completer.custom_completers = sdisp
1971 1971
1972 1972 self.set_hook('complete_command', module_completer, str_key = 'import')
1973 1973 self.set_hook('complete_command', module_completer, str_key = 'from')
1974 1974 self.set_hook('complete_command', magic_run_completer, str_key = '%run')
1975 1975 self.set_hook('complete_command', cd_completer, str_key = '%cd')
1976 1976 self.set_hook('complete_command', reset_completer, str_key = '%reset')
1977 1977
1978 1978 # Only configure readline if we truly are using readline. IPython can
1979 1979 # do tab-completion over the network, in GUIs, etc, where readline
1980 1980 # itself may be absent
1981 1981 if self.has_readline:
1982 1982 self.set_readline_completer()
1983 1983
1984 1984 def complete(self, text, line=None, cursor_pos=None):
1985 1985 """Return the completed text and a list of completions.
1986 1986
1987 1987 Parameters
1988 1988 ----------
1989 1989
1990 1990 text : string
1991 1991 A string of text to be completed on. It can be given as empty and
1992 1992 instead a line/position pair are given. In this case, the
1993 1993 completer itself will split the line like readline does.
1994 1994
1995 1995 line : string, optional
1996 1996 The complete line that text is part of.
1997 1997
1998 1998 cursor_pos : int, optional
1999 1999 The position of the cursor on the input line.
2000 2000
2001 2001 Returns
2002 2002 -------
2003 2003 text : string
2004 2004 The actual text that was completed.
2005 2005
2006 2006 matches : list
2007 2007 A sorted list with all possible completions.
2008 2008
2009 2009 The optional arguments allow the completion to take more context into
2010 2010 account, and are part of the low-level completion API.
2011 2011
2012 2012 This is a wrapper around the completion mechanism, similar to what
2013 2013 readline does at the command line when the TAB key is hit. By
2014 2014 exposing it as a method, it can be used by other non-readline
2015 2015 environments (such as GUIs) for text completion.
2016 2016
2017 2017 Simple usage example:
2018 2018
2019 2019 In [1]: x = 'hello'
2020 2020
2021 2021 In [2]: _ip.complete('x.l')
2022 2022 Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip'])
2023 2023 """
2024 2024
2025 2025 # Inject names into __builtin__ so we can complete on the added names.
2026 2026 with self.builtin_trap:
2027 2027 return self.Completer.complete(text, line, cursor_pos)
2028 2028
2029 2029 def set_custom_completer(self, completer, pos=0):
2030 2030 """Adds a new custom completer function.
2031 2031
2032 2032 The position argument (defaults to 0) is the index in the completers
2033 2033 list where you want the completer to be inserted."""
2034 2034
2035 2035 newcomp = types.MethodType(completer,self.Completer)
2036 2036 self.Completer.matchers.insert(pos,newcomp)
2037 2037
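# --- Editor's example: illustrative sketch, not part of the original diff ----
# A toy custom completer.  Functions passed to set_custom_completer() are
# bound to the Completer and inserted into its matchers list; as assumed
# here, each matcher receives the text being completed and returns a list of
# candidate strings.
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()

def color_matcher(self, text):
    """Hypothetical matcher offering a fixed set of candidates."""
    return [c for c in ('red', 'green', 'blue') if c.startswith(text)]

ip.set_custom_completer(color_matcher, pos=0)
print(ip.complete('re'))         # the custom candidates appear in the matches
# ------------------------------------------------------------------------------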
2038 2038 def set_readline_completer(self):
2039 2039 """Reset readline's completer to be our own."""
2040 2040 self.readline.set_completer(self.Completer.rlcomplete)
2041 2041
2042 2042 def set_completer_frame(self, frame=None):
2043 2043 """Set the frame of the completer."""
2044 2044 if frame:
2045 2045 self.Completer.namespace = frame.f_locals
2046 2046 self.Completer.global_namespace = frame.f_globals
2047 2047 else:
2048 2048 self.Completer.namespace = self.user_ns
2049 2049 self.Completer.global_namespace = self.user_global_ns
2050 2050
2051 2051 #-------------------------------------------------------------------------
2052 2052 # Things related to magics
2053 2053 #-------------------------------------------------------------------------
2054 2054
2055 2055 def init_magics(self):
2056 2056 from IPython.core import magics as m
2057 2057 self.magics_manager = magic.MagicsManager(shell=self,
2058 2058 parent=self,
2059 2059 user_magics=m.UserMagics(self))
2060 2060 self.configurables.append(self.magics_manager)
2061 2061
2062 2062 # Expose as public API from the magics manager
2063 2063 self.register_magics = self.magics_manager.register
2064 2064 self.define_magic = self.magics_manager.define_magic
2065 2065
2066 2066 self.register_magics(m.AutoMagics, m.BasicMagics, m.CodeMagics,
2067 2067 m.ConfigMagics, m.DeprecatedMagics, m.DisplayMagics, m.ExecutionMagics,
2068 2068 m.ExtensionMagics, m.HistoryMagics, m.LoggingMagics,
2069 2069 m.NamespaceMagics, m.OSMagics, m.PylabMagics, m.ScriptMagics,
2070 2070 )
2071 2071
2072 2072 # Register Magic Aliases
2073 2073 mman = self.magics_manager
2074 2074 # FIXME: magic aliases should be defined by the Magics classes
2075 2075 # or in MagicsManager, not here
2076 2076 mman.register_alias('ed', 'edit')
2077 2077 mman.register_alias('hist', 'history')
2078 2078 mman.register_alias('rep', 'recall')
2079 2079 mman.register_alias('SVG', 'svg', 'cell')
2080 2080 mman.register_alias('HTML', 'html', 'cell')
2081 2081 mman.register_alias('file', 'writefile', 'cell')
2082 2082
2083 2083 # FIXME: Move the color initialization to the DisplayHook, which
2084 2084 # should be split into a prompt manager and displayhook. We probably
2085 2085 # even need a centralized color management object.
2086 2086 self.magic('colors %s' % self.colors)
2087 2087
2088 2088 # Defined here so that it's included in the documentation
2089 2089 @functools.wraps(magic.MagicsManager.register_function)
2090 2090 def register_magic_function(self, func, magic_kind='line', magic_name=None):
2091 2091 self.magics_manager.register_function(func,
2092 2092 magic_kind=magic_kind, magic_name=magic_name)
2093 2093
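# --- Editor's example: illustrative sketch, not part of the original diff ----
# Registering a plain function as a line magic via register_magic_function().
# A standalone line magic receives the rest of the line as a single string;
# the magic name `shout` is hypothetical.
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()

def shout(line):
    """Return the argument upper-cased."""
    return line.upper()

ip.register_magic_function(shout, magic_kind='line', magic_name='shout')
print(ip.run_line_magic('shout', 'hello'))   # -> HELLO
# ------------------------------------------------------------------------------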
2094 2094 def run_line_magic(self, magic_name, line):
2095 2095 """Execute the given line magic.
2096 2096
2097 2097 Parameters
2098 2098 ----------
2099 2099 magic_name : str
2100 2100 Name of the desired magic function, without '%' prefix.
2101 2101
2102 2102 line : str
2103 2103 The rest of the input line as a single string.
2104 2104 """
2105 2105 fn = self.find_line_magic(magic_name)
2106 2106 if fn is None:
2107 2107 cm = self.find_cell_magic(magic_name)
2108 2108 etpl = "Line magic function `%%%s` not found%s."
2109 2109 extra = '' if cm is None else (' (But cell magic `%%%%%s` exists, '
2110 2110 'did you mean that instead?)' % magic_name )
2111 2111 error(etpl % (magic_name, extra))
2112 2112 else:
2113 2113 # Note: this is the distance in the stack to the user's frame.
2114 2114 # This will need to be updated if the internal calling logic gets
2115 2115 # refactored, or else we'll be expanding the wrong variables.
2116 2116 stack_depth = 2
2117 2117 magic_arg_s = self.var_expand(line, stack_depth)
2118 2118 # Put magic args in a list so we can call with f(*a) syntax
2119 2119 args = [magic_arg_s]
2120 2120 kwargs = {}
2121 2121 # Grab local namespace if we need it:
2122 2122 if getattr(fn, "needs_local_scope", False):
2123 2123 kwargs['local_ns'] = sys._getframe(stack_depth).f_locals
2124 2124 with self.builtin_trap:
2125 2125 result = fn(*args,**kwargs)
2126 2126 return result
2127 2127
2128 2128 def run_cell_magic(self, magic_name, line, cell):
2129 2129 """Execute the given cell magic.
2130 2130
2131 2131 Parameters
2132 2132 ----------
2133 2133 magic_name : str
2134 2134 Name of the desired magic function, without '%' prefix.
2135 2135
2136 2136 line : str
2137 2137 The rest of the first input line as a single string.
2138 2138
2139 2139 cell : str
2140 2140 The body of the cell as a (possibly multiline) string.
2141 2141 """
2142 2142 fn = self.find_cell_magic(magic_name)
2143 2143 if fn is None:
2144 2144 lm = self.find_line_magic(magic_name)
2145 2145 etpl = "Cell magic `%%{0}` not found{1}."
2146 2146 extra = '' if lm is None else (' (But line magic `%{0}` exists, '
2147 2147 'did you mean that instead?)'.format(magic_name))
2148 2148 error(etpl.format(magic_name, extra))
2149 2149 elif cell == '':
2150 2150 message = '%%{0} is a cell magic, but the cell body is empty.'.format(magic_name)
2151 2151 if self.find_line_magic(magic_name) is not None:
2152 2152 message += ' Did you mean the line magic %{0} (single %)?'.format(magic_name)
2153 2153 raise UsageError(message)
2154 2154 else:
2155 2155 # Note: this is the distance in the stack to the user's frame.
2156 2156 # This will need to be updated if the internal calling logic gets
2157 2157 # refactored, or else we'll be expanding the wrong variables.
2158 2158 stack_depth = 2
2159 2159 magic_arg_s = self.var_expand(line, stack_depth)
2160 2160 with self.builtin_trap:
2161 2161 result = fn(magic_arg_s, cell)
2162 2162 return result
2163 2163
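# --- Editor's example: illustrative sketch, not part of the original diff ----
# Calling magics programmatically.  run_line_magic() takes the magic name
# (no '%') plus the rest of the line; run_cell_magic() additionally takes the
# cell body.  %timeit / %%timeit are built-in magics used here purely as an
# example.
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()
ip.run_line_magic('timeit', '-n 10 -r 1 sum(range(100))')
ip.run_cell_magic('timeit', '-n 10 -r 1', 'total = sum(range(100))')
# ------------------------------------------------------------------------------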
2164 2164 def find_line_magic(self, magic_name):
2165 2165 """Find and return a line magic by name.
2166 2166
2167 2167 Returns None if the magic isn't found."""
2168 2168 return self.magics_manager.magics['line'].get(magic_name)
2169 2169
2170 2170 def find_cell_magic(self, magic_name):
2171 2171 """Find and return a cell magic by name.
2172 2172
2173 2173 Returns None if the magic isn't found."""
2174 2174 return self.magics_manager.magics['cell'].get(magic_name)
2175 2175
2176 2176 def find_magic(self, magic_name, magic_kind='line'):
2177 2177 """Find and return a magic of the given type by name.
2178 2178
2179 2179 Returns None if the magic isn't found."""
2180 2180 return self.magics_manager.magics[magic_kind].get(magic_name)
2181 2181
2182 2182 def magic(self, arg_s):
2183 2183 """DEPRECATED. Use run_line_magic() instead.
2184 2184
2185 2185 Call a magic function by name.
2186 2186
2187 2187 Input: a string containing the name of the magic function to call and
2188 2188 any additional arguments to be passed to the magic.
2189 2189
2190 2190 magic('name -opt foo bar') is equivalent to typing at the ipython
2191 2191 prompt:
2192 2192
2193 2193 In[1]: %name -opt foo bar
2194 2194
2195 2195 To call a magic without arguments, simply use magic('name').
2196 2196
2197 2197 This provides a proper Python function to call IPython's magics in any
2198 2198 valid Python code you can type at the interpreter, including loops and
2199 2199 compound statements.
2200 2200 """
2201 2201 # TODO: should we issue a loud deprecation warning here?
2202 2202 magic_name, _, magic_arg_s = arg_s.partition(' ')
2203 2203 magic_name = magic_name.lstrip(prefilter.ESC_MAGIC)
2204 2204 return self.run_line_magic(magic_name, magic_arg_s)
2205 2205
2206 2206 #-------------------------------------------------------------------------
2207 2207 # Things related to macros
2208 2208 #-------------------------------------------------------------------------
2209 2209
2210 2210 def define_macro(self, name, themacro):
2211 2211 """Define a new macro
2212 2212
2213 2213 Parameters
2214 2214 ----------
2215 2215 name : str
2216 2216 The name of the macro.
2217 2217 themacro : str or Macro
2218 2218 The action to do upon invoking the macro. If a string, a new
2219 2219 Macro object is created by passing the string to it.
2220 2220 """
2221 2221
2222 2222 from IPython.core import macro
2223 2223
2224 2224 if isinstance(themacro, string_types):
2225 2225 themacro = macro.Macro(themacro)
2226 2226 if not isinstance(themacro, macro.Macro):
2227 2227 raise ValueError('A macro must be a string or a Macro instance.')
2228 2228 self.user_ns[name] = themacro
2229 2229
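# --- Editor's example: illustrative sketch, not part of the original diff ----
# Defining a macro from a string.  The name `greet` is hypothetical; typing
# it at the prompt replays the stored code via the prefilter machinery.
from IPython.core.interactiveshell import InteractiveShell
from IPython.core.macro import Macro

ip = InteractiveShell.instance()
ip.define_macro('greet', 'name = "world"\nprint("hello " + name)\n')
assert isinstance(ip.user_ns['greet'], Macro)
# ------------------------------------------------------------------------------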
2230 2230 #-------------------------------------------------------------------------
2231 2231 # Things related to the running of system commands
2232 2232 #-------------------------------------------------------------------------
2233 2233
2234 2234 def system_piped(self, cmd):
2235 2235 """Call the given cmd in a subprocess, piping stdout/err
2236 2236
2237 2237 Parameters
2238 2238 ----------
2239 2239 cmd : str
2240 2240 Command to execute (cannot end in '&', as background processes are
2241 2241 not supported). Should not be a command that expects input
2242 2242 other than simple text.
2243 2243 """
2244 2244 if cmd.rstrip().endswith('&'):
2245 2245 # this is *far* from a rigorous test
2246 2246 # We do not support backgrounding processes because we either use
2247 2247 # pexpect or pipes to read from. Users can always just call
2248 2248 # os.system() or use ip.system=ip.system_raw
2249 2249 # if they really want a background process.
2250 2250 raise OSError("Background processes not supported.")
2251 2251
2252 2252 # we explicitly do NOT return the subprocess status code, because
2253 2253 # a non-None value would trigger :func:`sys.displayhook` calls.
2254 2254 # Instead, we store the exit_code in user_ns.
2255 2255 self.user_ns['_exit_code'] = system(self.var_expand(cmd, depth=1))
2256 2256
2257 2257 def system_raw(self, cmd):
2258 2258 """Call the given cmd in a subprocess using os.system on Windows or
2259 2259 subprocess.call using the system shell on other platforms.
2260 2260
2261 2261 Parameters
2262 2262 ----------
2263 2263 cmd : str
2264 2264 Command to execute.
2265 2265 """
2266 2266 cmd = self.var_expand(cmd, depth=1)
2267 2267 # protect os.system from UNC paths on Windows, which it can't handle:
2268 2268 if sys.platform == 'win32':
2269 2269 from IPython.utils._process_win32 import AvoidUNCPath
2270 2270 with AvoidUNCPath() as path:
2271 2271 if path is not None:
2272 2272 cmd = '"pushd %s &&"%s' % (path, cmd)
2273 2273 cmd = py3compat.unicode_to_str(cmd)
2274 2274 ec = os.system(cmd)
2275 2275 else:
2276 2276 cmd = py3compat.unicode_to_str(cmd)
2277 2277 # Call the cmd using the OS shell, instead of the default /bin/sh, if set.
2278 2278 ec = subprocess.call(cmd, shell=True, executable=os.environ.get('SHELL', None))
2279 2279 # exit code is positive for program failure, or negative for
2280 2280 # terminating signal number.
2281 2281
2282 2282 # We explicitly do NOT return the subprocess status code, because
2283 2283 # a non-None value would trigger :func:`sys.displayhook` calls.
2284 2284 # Instead, we store the exit_code in user_ns.
2285 2285 self.user_ns['_exit_code'] = ec
2286 2286
2287 2287 # use piped system by default, because it is better behaved
2288 2288 system = system_piped
2289 2289
2290 2290 def getoutput(self, cmd, split=True, depth=0):
2291 2291 """Get output (possibly including stderr) from a subprocess.
2292 2292
2293 2293 Parameters
2294 2294 ----------
2295 2295 cmd : str
2296 2296 Command to execute (cannot end in '&', as background processes are
2297 2297 not supported).
2298 2298 split : bool, optional
2299 2299 If True, split the output into an IPython SList. Otherwise, an
2300 2300 IPython LSString is returned. These are objects similar to normal
2301 2301 lists and strings, with a few convenience attributes for easier
2302 2302 manipulation of line-based output. You can use '?' on them for
2303 2303 details.
2304 2304 depth : int, optional
2305 2305 How many frames above the caller are the local variables which should
2306 2306 be expanded in the command string? The default (0) assumes that the
2307 2307 expansion variables are in the stack frame calling this function.
2308 2308 """
2309 2309 if cmd.rstrip().endswith('&'):
2310 2310 # this is *far* from a rigorous test
2311 2311 raise OSError("Background processes not supported.")
2312 2312 out = getoutput(self.var_expand(cmd, depth=depth+1))
2313 2313 if split:
2314 2314 out = SList(out.splitlines())
2315 2315 else:
2316 2316 out = LSString(out)
2317 2317 return out
2318 2318
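# --- Editor's example: illustrative sketch, not part of the original diff ----
# system() stores the exit code in user_ns['_exit_code'] instead of returning
# it; getoutput() captures output as an SList (or an LSString with
# split=False).
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()
ip.system('echo hello')
print(ip.user_ns['_exit_code'])      # -> 0 on success
lines = ip.getoutput('echo hello')   # SList of output lines
print(lines[0])                      # -> 'hello'
# ------------------------------------------------------------------------------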
2319 2319 #-------------------------------------------------------------------------
2320 2320 # Things related to aliases
2321 2321 #-------------------------------------------------------------------------
2322 2322
2323 2323 def init_alias(self):
2324 2324 self.alias_manager = AliasManager(shell=self, parent=self)
2325 2325 self.configurables.append(self.alias_manager)
2326 2326
2327 2327 #-------------------------------------------------------------------------
2328 2328 # Things related to extensions
2329 2329 #-------------------------------------------------------------------------
2330 2330
2331 2331 def init_extension_manager(self):
2332 2332 self.extension_manager = ExtensionManager(shell=self, parent=self)
2333 2333 self.configurables.append(self.extension_manager)
2334 2334
2335 2335 #-------------------------------------------------------------------------
2336 2336 # Things related to payloads
2337 2337 #-------------------------------------------------------------------------
2338 2338
2339 2339 def init_payload(self):
2340 2340 self.payload_manager = PayloadManager(parent=self)
2341 2341 self.configurables.append(self.payload_manager)
2342 2342
2343 2343 #-------------------------------------------------------------------------
2344 2344 # Things related to widgets
2345 2345 #-------------------------------------------------------------------------
2346 2346
2347 2347 def init_comms(self):
2348 2348 # not implemented in the base class
2349 2349 pass
2350 2350
2351 2351 #-------------------------------------------------------------------------
2352 2352 # Things related to the prefilter
2353 2353 #-------------------------------------------------------------------------
2354 2354
2355 2355 def init_prefilter(self):
2356 2356 self.prefilter_manager = PrefilterManager(shell=self, parent=self)
2357 2357 self.configurables.append(self.prefilter_manager)
2358 2358 # Ultimately this will be refactored in the new interpreter code, but
2359 2359 # for now, we should expose the main prefilter method (there's legacy
2360 2360 # code out there that may rely on this).
2361 2361 self.prefilter = self.prefilter_manager.prefilter_lines
2362 2362
2363 2363 def auto_rewrite_input(self, cmd):
2364 2364 """Print to the screen the rewritten form of the user's command.
2365 2365
2366 2366 This shows visual feedback by rewriting input lines that cause
2367 2367 automatic calling to kick in, like::
2368 2368
2369 2369 /f x
2370 2370
2371 2371 into::
2372 2372
2373 2373 ------> f(x)
2374 2374
2375 2375 after the user's input prompt. This helps the user understand that the
2376 2376 input line was transformed automatically by IPython.
2377 2377 """
2378 2378 if not self.show_rewritten_input:
2379 2379 return
2380 2380
2381 2381 rw = self.prompt_manager.render('rewrite') + cmd
2382 2382
2383 2383 try:
2384 2384 # plain ascii works better w/ pyreadline, on some machines, so
2385 2385 # we use it and only print uncolored rewrite if we have unicode
2386 2386 rw = str(rw)
2387 2387 print(rw, file=io.stdout)
2388 2388 except UnicodeEncodeError:
2389 2389 print("------> " + cmd)
2390 2390
2391 2391 #-------------------------------------------------------------------------
2392 2392 # Things related to extracting values/expressions from kernel and user_ns
2393 2393 #-------------------------------------------------------------------------
2394 2394
2395 2395 def _user_obj_error(self):
2396 2396 """return simple exception dict
2397 2397
2398 for use in user_variables / expressions
2398 for use in user_expressions
2399 2399 """
2400 2400
2401 2401 etype, evalue, tb = self._get_exc_info()
2402 2402 stb = self.InteractiveTB.get_exception_only(etype, evalue)
2403 2403
2404 2404 exc_info = {
2405 2405 u'status' : 'error',
2406 2406 u'traceback' : stb,
2407 2407 u'ename' : unicode_type(etype.__name__),
2408 2408 u'evalue' : py3compat.safe_unicode(evalue),
2409 2409 }
2410 2410
2411 2411 return exc_info
2412 2412
2413 2413 def _format_user_obj(self, obj):
2414 2414 """format a user object to display dict
2415 2415
2416 for use in user_expressions / variables
2416 for use in user_expressions
2417 2417 """
2418 2418
2419 2419 data, md = self.display_formatter.format(obj)
2420 2420 value = {
2421 2421 'status' : 'ok',
2422 2422 'data' : data,
2423 2423 'metadata' : md,
2424 2424 }
2425 2425 return value
2426 2426
2427 def user_variables(self, names):
2428 """Get a list of variable names from the user's namespace.
2429
2430 Parameters
2431 ----------
2432 names : list of strings
2433 A list of names of variables to be read from the user namespace.
2434
2435 Returns
2436 -------
2437 A dict, keyed by the input names and with the rich mime-type repr(s) of each value.
2438 Each element will be a sub-dict of the same form as a display_data message.
2439 """
2440 out = {}
2441 user_ns = self.user_ns
2442
2443 for varname in names:
2444 try:
2445 value = self._format_user_obj(user_ns[varname])
2446 except:
2447 value = self._user_obj_error()
2448 out[varname] = value
2449 return out
2450
2451 2427 def user_expressions(self, expressions):
2452 2428 """Evaluate a dict of expressions in the user's namespace.
2453 2429
2454 2430 Parameters
2455 2431 ----------
2456 2432 expressions : dict
2457 2433 A dict with string keys and string values. The expression values
2458 2434 should be valid Python expressions, each of which will be evaluated
2459 2435 in the user namespace.
2460 2436
2461 2437 Returns
2462 2438 -------
2463 2439 A dict, keyed like the input expressions dict, with the rich mime-typed
2464 2440 display_data of each value.
2465 2441 """
2466 2442 out = {}
2467 2443 user_ns = self.user_ns
2468 2444 global_ns = self.user_global_ns
2469 2445
2470 2446 for key, expr in iteritems(expressions):
2471 2447 try:
2472 2448 value = self._format_user_obj(eval(expr, global_ns, user_ns))
2473 2449 except:
2474 2450 value = self._user_obj_error()
2475 2451 out[key] = value
2476 2452 return out
2477 2453
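# --- Editor's example: illustrative sketch, not part of the original diff ----
# user_expressions() evaluates each expression in the user namespace and
# returns display_data-style dicts, reporting errors per key rather than
# raising them.
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()
ip.push({'x': 21})
out = ip.user_expressions({'doubled': 'x * 2', 'broken': '1/0'})
print(out['doubled']['status'])      # -> 'ok'
print(out['doubled']['data'])        # e.g. {'text/plain': '42'}
print(out['broken']['status'])       # -> 'error'
# ------------------------------------------------------------------------------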
2478 2454 #-------------------------------------------------------------------------
2479 2455 # Things related to the running of code
2480 2456 #-------------------------------------------------------------------------
2481 2457
2482 2458 def ex(self, cmd):
2483 2459 """Execute a normal python statement in user namespace."""
2484 2460 with self.builtin_trap:
2485 2461 exec(cmd, self.user_global_ns, self.user_ns)
2486 2462
2487 2463 def ev(self, expr):
2488 2464 """Evaluate python expression expr in user namespace.
2489 2465
2490 2466 Returns the result of evaluation
2491 2467 """
2492 2468 with self.builtin_trap:
2493 2469 return eval(expr, self.user_global_ns, self.user_ns)
2494 2470
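# --- Editor's example: illustrative sketch, not part of the original diff ----
# ex() executes a statement and ev() evaluates an expression, both in the
# user namespace.
from IPython.core.interactiveshell import InteractiveShell

ip = InteractiveShell.instance()
ip.ex('counter = 10')                # statement: binds `counter` in user_ns
print(ip.ev('counter * 2'))          # expression: -> 20
# ------------------------------------------------------------------------------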
2495 2471 def safe_execfile(self, fname, *where, **kw):
2496 2472 """A safe version of the builtin execfile().
2497 2473
2498 2474 This version will never throw an exception, but instead print
2499 2475 helpful error messages to the screen. This only works on pure
2500 2476 Python files with the .py extension.
2501 2477
2502 2478 Parameters
2503 2479 ----------
2504 2480 fname : string
2505 2481 The name of the file to be executed.
2506 2482 where : tuple
2507 2483 One or two namespaces, passed to execfile() as (globals,locals).
2508 2484 If only one is given, it is passed as both.
2509 2485 exit_ignore : bool (False)
2510 2486 If True, then silence SystemExit for non-zero status (it is always
2511 2487 silenced for zero status, as it is so common).
2512 2488 raise_exceptions : bool (False)
2513 2489 If True raise exceptions everywhere. Meant for testing.
2514 2490
2515 2491 """
2516 2492 kw.setdefault('exit_ignore', False)
2517 2493 kw.setdefault('raise_exceptions', False)
2518 2494
2519 2495 fname = os.path.abspath(os.path.expanduser(fname))
2520 2496
2521 2497 # Make sure we can open the file
2522 2498 try:
2523 2499 with open(fname) as thefile:
2524 2500 pass
2525 2501 except:
2526 2502 warn('Could not open file <%s> for safe execution.' % fname)
2527 2503 return
2528 2504
2529 2505 # Find things also in current directory. This is needed to mimic the
2530 2506 # behavior of running a script from the system command line, where
2531 2507 # Python inserts the script's directory into sys.path
2532 2508 dname = os.path.dirname(fname)
2533 2509
2534 2510 with prepended_to_syspath(dname):
2535 2511 try:
2536 2512 py3compat.execfile(fname,*where)
2537 2513 except SystemExit as status:
2538 2514 # If the call was made with 0 or None exit status (sys.exit(0)
2539 2515 # or sys.exit() ), don't bother showing a traceback, as both of
2540 2516 # these are considered normal by the OS:
2541 2517 # > python -c'import sys;sys.exit(0)'; echo $?
2542 2518 # 0
2543 2519 # > python -c'import sys;sys.exit()'; echo $?
2544 2520 # 0
2545 2521 # For other exit status, we show the exception unless
2546 2522 # explicitly silenced, but only in short form.
2547 2523 if kw['raise_exceptions']:
2548 2524 raise
2549 2525 if status.code and not kw['exit_ignore']:
2550 2526 self.showtraceback(exception_only=True)
2551 2527 except:
2552 2528 if kw['raise_exceptions']:
2553 2529 raise
2554 2530 # tb offset is 2 because we wrap execfile
2555 2531 self.showtraceback(tb_offset=2)
2556 2532
2557 2533 def safe_execfile_ipy(self, fname):
2558 2534 """Like safe_execfile, but for .ipy or .ipynb files with IPython syntax.
2559 2535
2560 2536 Parameters
2561 2537 ----------
2562 2538 fname : str
2563 2539 The name of the file to execute. The filename must have a
2564 2540 .ipy or .ipynb extension.
2565 2541 """
2566 2542 fname = os.path.abspath(os.path.expanduser(fname))
2567 2543
2568 2544 # Make sure we can open the file
2569 2545 try:
2570 2546 with open(fname) as thefile:
2571 2547 pass
2572 2548 except:
2573 2549 warn('Could not open file <%s> for safe execution.' % fname)
2574 2550 return
2575 2551
2576 2552 # Find things also in current directory. This is needed to mimic the
2577 2553 # behavior of running a script from the system command line, where
2578 2554 # Python inserts the script's directory into sys.path
2579 2555 dname = os.path.dirname(fname)
2580 2556
2581 2557 def get_cells():
2582 2558 """generator for sequence of code blocks to run"""
2583 2559 if fname.endswith('.ipynb'):
2584 2560 from IPython.nbformat import current
2585 2561 with open(fname) as f:
2586 2562 nb = current.read(f, 'json')
2587 2563 if not nb.worksheets:
2588 2564 return
2589 2565 for cell in nb.worksheets[0].cells:
2590 2566 if cell.cell_type == 'code':
2591 2567 yield cell.input
2592 2568 else:
2593 2569 with open(fname) as f:
2594 2570 yield f.read()
2595 2571
2596 2572 with prepended_to_syspath(dname):
2597 2573 try:
2598 2574 for cell in get_cells():
2599 2575 # self.run_cell currently captures all exceptions
2600 2576 # raised in user code. It would be nice if there were
2601 2577 # versions of run_cell that did raise, so
2602 2578 # we could catch the errors.
2603 2579 self.run_cell(cell, silent=True, shell_futures=False)
2604 2580 except:
2605 2581 self.showtraceback()
2606 2582 warn('Unknown failure executing file: <%s>' % fname)
2607 2583
2608 2584 def safe_run_module(self, mod_name, where):
2609 2585 """A safe version of runpy.run_module().
2610 2586
2611 2587 This version will never throw an exception, but instead print
2612 2588 helpful error messages to the screen.
2613 2589
2614 2590 `SystemExit` exceptions with status code 0 or None are ignored.
2615 2591
2616 2592 Parameters
2617 2593 ----------
2618 2594 mod_name : string
2619 2595 The name of the module to be executed.
2620 2596 where : dict
2621 2597 The globals namespace.
2622 2598 """
2623 2599 try:
2624 2600 try:
2625 2601 where.update(
2626 2602 runpy.run_module(str(mod_name), run_name="__main__",
2627 2603 alter_sys=True)
2628 2604 )
2629 2605 except SystemExit as status:
2630 2606 if status.code:
2631 2607 raise
2632 2608 except:
2633 2609 self.showtraceback()
2634 2610 warn('Unknown failure executing module: <%s>' % mod_name)
2635 2611
2636 2612 def _run_cached_cell_magic(self, magic_name, line):
2637 2613 """Special method to call a cell magic with the data stored in self.
2638 2614 """
2639 2615 cell = self._current_cell_magic_body
2640 2616 self._current_cell_magic_body = None
2641 2617 return self.run_cell_magic(magic_name, line, cell)
2642 2618
2643 2619 def run_cell(self, raw_cell, store_history=False, silent=False, shell_futures=True):
2644 2620 """Run a complete IPython cell.
2645 2621
2646 2622 Parameters
2647 2623 ----------
2648 2624 raw_cell : str
2649 2625 The code (including IPython code such as %magic functions) to run.
2650 2626 store_history : bool
2651 2627 If True, the raw and translated cell will be stored in IPython's
2652 2628 history. For user code calling back into IPython's machinery, this
2653 2629 should be set to False.
2654 2630 silent : bool
2655 2631 If True, avoid side-effects, such as implicit displayhooks and
2656 2632 logging. silent=True forces store_history=False.
2657 2633 shell_futures : bool
2658 2634 If True, the code will share future statements with the interactive
2659 2635 shell. It will both be affected by previous __future__ imports, and
2660 2636 any __future__ imports in the code will affect the shell. If False,
2661 2637 __future__ imports are not shared in either direction.
2662 2638 """
2663 2639 if (not raw_cell) or raw_cell.isspace():
2664 2640 return
2665 2641
2666 2642 if silent:
2667 2643 store_history = False
2668 2644
2669 2645 self.events.trigger('pre_execute')
2670 2646 if not silent:
2671 2647 self.events.trigger('pre_run_cell')
2672 2648
2673 2649 # If any of our input transformation (input_transformer_manager or
2674 2650 # prefilter_manager) raises an exception, we store it in this variable
2675 2651 # so that we can display the error after logging the input and storing
2676 2652 # it in the history.
2677 2653 preprocessing_exc_tuple = None
2678 2654 try:
2679 2655 # Static input transformations
2680 2656 cell = self.input_transformer_manager.transform_cell(raw_cell)
2681 2657 except SyntaxError:
2682 2658 preprocessing_exc_tuple = sys.exc_info()
2683 2659 cell = raw_cell # cell has to exist so it can be stored/logged
2684 2660 else:
2685 2661 if len(cell.splitlines()) == 1:
2686 2662 # Dynamic transformations - only applied for single line commands
2687 2663 with self.builtin_trap:
2688 2664 try:
2689 2665 # use prefilter_lines to handle trailing newlines
2690 2666 # restore trailing newline for ast.parse
2691 2667 cell = self.prefilter_manager.prefilter_lines(cell) + '\n'
2692 2668 except Exception:
2693 2669 # don't allow prefilter errors to crash IPython
2694 2670 preprocessing_exc_tuple = sys.exc_info()
2695 2671
2696 2672 # Store raw and processed history
2697 2673 if store_history:
2698 2674 self.history_manager.store_inputs(self.execution_count,
2699 2675 cell, raw_cell)
2700 2676 if not silent:
2701 2677 self.logger.log(cell, raw_cell)
2702 2678
2703 2679 # Display the exception if input processing failed.
2704 2680 if preprocessing_exc_tuple is not None:
2705 2681 self.showtraceback(preprocessing_exc_tuple)
2706 2682 if store_history:
2707 2683 self.execution_count += 1
2708 2684 return
2709 2685
2710 2686 # Our own compiler remembers the __future__ environment. If we want to
2711 2687 # run code with a separate __future__ environment, use the default
2712 2688 # compiler
2713 2689 compiler = self.compile if shell_futures else CachingCompiler()
2714 2690
2715 2691 with self.builtin_trap:
2716 2692 cell_name = self.compile.cache(cell, self.execution_count)
2717 2693
2718 2694 with self.display_trap:
2719 2695 # Compile to bytecode
2720 2696 try:
2721 2697 code_ast = compiler.ast_parse(cell, filename=cell_name)
2722 2698 except IndentationError:
2723 2699 self.showindentationerror()
2724 2700 if store_history:
2725 2701 self.execution_count += 1
2726 2702 return None
2727 2703 except (OverflowError, SyntaxError, ValueError, TypeError,
2728 2704 MemoryError):
2729 2705 self.showsyntaxerror()
2730 2706 if store_history:
2731 2707 self.execution_count += 1
2732 2708 return None
2733 2709
2734 2710 # Apply AST transformations
2735 2711 code_ast = self.transform_ast(code_ast)
2736 2712
2737 2713 # Execute the user code
2738 2714 interactivity = "none" if silent else self.ast_node_interactivity
2739 2715 self.run_ast_nodes(code_ast.body, cell_name,
2740 2716 interactivity=interactivity, compiler=compiler)
2741 2717
2742 2718 self.events.trigger('post_execute')
2743 2719 if not silent:
2744 2720 self.events.trigger('post_run_cell')
2745 2721
2746 2722 if store_history:
2747 2723 # Write output to the database. Does nothing unless
2748 2724 # history output logging is enabled.
2749 2725 self.history_manager.store_output(self.execution_count)
2750 2726 # Each cell is a *single* input, regardless of how many lines it has
2751 2727 self.execution_count += 1
2752 2728
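A quick usage sketch for run_cell (not part of this changeset), assuming it runs inside a live IPython session where get_ipython() returns the shell, as the tests further down in this diff do:

    ip = get_ipython()                               # assumed: an active IPython session
    ip.run_cell('x = 1 + 1', store_history=True)     # logged in history, bumps execution_count
    ip.run_cell('x', silent=True)                    # skips displayhook and pre/post_run_cell events; forces store_history=False
    ip.run_cell('from __future__ import division')   # shared with the shell, since shell_futures defaults to True
    ip.run_cell('y = 1 / 2', shell_futures=False)    # compiled without the shell's __future__ flags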
2753 2729 def transform_ast(self, node):
2754 2730 """Apply the AST transformations from self.ast_transformers
2755 2731
2756 2732 Parameters
2757 2733 ----------
2758 2734 node : ast.Node
2759 2735 The root node to be transformed. Typically called with the ast.Module
2760 2736 produced by parsing user input.
2761 2737
2762 2738 Returns
2763 2739 -------
2764 2740 An ast.Node corresponding to the node it was called with. Note that it
2765 2741 may also modify the passed object, so don't rely on references to the
2766 2742 original AST.
2767 2743 """
2768 2744 for transformer in self.ast_transformers:
2769 2745 try:
2770 2746 node = transformer.visit(node)
2771 2747 except Exception:
2772 2748 warn("AST transformer %r threw an error. It will be unregistered." % transformer)
2773 2749 self.ast_transformers.remove(transformer)
2774 2750
2775 2751 if self.ast_transformers:
2776 2752 ast.fix_missing_locations(node)
2777 2753 return node
2778 2754
2779 2755
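A minimal sketch of wiring an AST transformer into transform_ast via the ast_transformers list; it mirrors the Negator helper used in the tests later in this diff (the get_ipython() call assumes an active session):

    import ast

    class Negator(ast.NodeTransformer):
        # Negate every numeric literal, same idea as the test helper below.
        def visit_Num(self, node):
            node.n = -node.n
            return node

    ip = get_ipython()
    neg = Negator()
    ip.ast_transformers.append(neg)
    ip.run_cell('print(12 + 22)')      # prints -34 while the transformer is registered
    ip.ast_transformers.remove(neg)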
2780 2756 def run_ast_nodes(self, nodelist, cell_name, interactivity='last_expr',
2781 2757 compiler=compile):
2782 2758 """Run a sequence of AST nodes. The execution mode depends on the
2783 2759 interactivity parameter.
2784 2760
2785 2761 Parameters
2786 2762 ----------
2787 2763 nodelist : list
2788 2764 A sequence of AST nodes to run.
2789 2765 cell_name : str
2790 2766 Will be passed to the compiler as the filename of the cell. Typically
2791 2767 the value returned by ip.compile.cache(cell).
2792 2768 interactivity : str
2793 2769 'all', 'last', 'last_expr' or 'none', specifying which nodes should be
2794 2770 run interactively (displaying output from expressions). 'last_expr'
2795 2771 will run the last node interactively only if it is an expression (i.e.
2796 2772 expressions in loops or other blocks are not displayed. Other values
2797 2773 for this parameter will raise a ValueError.
2798 2774 compiler : callable
2799 2775 A function with the same interface as the built-in compile(), to turn
2800 2776 the AST nodes into code objects. Default is the built-in compile().
2801 2777 """
2802 2778 if not nodelist:
2803 2779 return
2804 2780
2805 2781 if interactivity == 'last_expr':
2806 2782 if isinstance(nodelist[-1], ast.Expr):
2807 2783 interactivity = "last"
2808 2784 else:
2809 2785 interactivity = "none"
2810 2786
2811 2787 if interactivity == 'none':
2812 2788 to_run_exec, to_run_interactive = nodelist, []
2813 2789 elif interactivity == 'last':
2814 2790 to_run_exec, to_run_interactive = nodelist[:-1], nodelist[-1:]
2815 2791 elif interactivity == 'all':
2816 2792 to_run_exec, to_run_interactive = [], nodelist
2817 2793 else:
2818 2794 raise ValueError("Interactivity was %r" % interactivity)
2819 2795
2820 2796 exec_count = self.execution_count
2821 2797
2822 2798 try:
2823 2799 for i, node in enumerate(to_run_exec):
2824 2800 mod = ast.Module([node])
2825 2801 code = compiler(mod, cell_name, "exec")
2826 2802 if self.run_code(code):
2827 2803 return True
2828 2804
2829 2805 for i, node in enumerate(to_run_interactive):
2830 2806 mod = ast.Interactive([node])
2831 2807 code = compiler(mod, cell_name, "single")
2832 2808 if self.run_code(code):
2833 2809 return True
2834 2810
2835 2811 # Flush softspace
2836 2812 if softspace(sys.stdout, 0):
2837 2813 print()
2838 2814
2839 2815 except:
2840 2816 # It's possible to have exceptions raised here, typically by
2841 2817 # compilation of odd code (such as a naked 'return' outside a
2842 2818 # function) that did parse but isn't valid. Typically the exception
2843 2819 # is a SyntaxError, but it's safest just to catch anything and show
2844 2820 # the user a traceback.
2845 2821
2846 2822 # We do only one try/except outside the loop to minimize the impact
2847 2823 # on runtime, and also because if any node in the node list is
2848 2824 # broken, we should stop execution completely.
2849 2825 self.showtraceback()
2850 2826
2851 2827 return False
2852 2828
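The interactivity modes above are driven by the shell's ast_node_interactivity setting (the trait consulted by run_cell); a small sketch of the observable difference, assuming an active session:

    ip = get_ipython()
    ip.run_cell('a = 1\na + 1')          # default 'last_expr': only the trailing expression is displayed
    ip.ast_node_interactivity = 'all'
    ip.run_cell('1 + 1\n2 + 2')          # now every top-level expression is displayed
    ip.ast_node_interactivity = 'last_expr'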
2853 2829 def run_code(self, code_obj):
2854 2830 """Execute a code object.
2855 2831
2856 2832 When an exception occurs, self.showtraceback() is called to display a
2857 2833 traceback.
2858 2834
2859 2835 Parameters
2860 2836 ----------
2861 2837 code_obj : code object
2862 2838 A compiled code object, to be executed
2863 2839
2864 2840 Returns
2865 2841 -------
2866 2842 False : successful execution.
2867 2843 True : an error occurred.
2868 2844 """
2869 2845
2870 2846 # Set our own excepthook in case the user code tries to call it
2871 2847 # directly, so that the IPython crash handler doesn't get triggered
2872 2848 old_excepthook,sys.excepthook = sys.excepthook, self.excepthook
2873 2849
2874 2850 # we save the original sys.excepthook in the instance, in case config
2875 2851 # code (such as magics) needs access to it.
2876 2852 self.sys_excepthook = old_excepthook
2877 2853 outflag = 1 # happens in more places, so it's easier as default
2878 2854 try:
2879 2855 try:
2880 2856 self.hooks.pre_run_code_hook()
2881 2857 #rprint('Running code', repr(code_obj)) # dbg
2882 2858 exec(code_obj, self.user_global_ns, self.user_ns)
2883 2859 finally:
2884 2860 # Reset our crash handler in place
2885 2861 sys.excepthook = old_excepthook
2886 2862 except SystemExit:
2887 2863 self.showtraceback(exception_only=True)
2888 2864 warn("To exit: use 'exit', 'quit', or Ctrl-D.", level=1)
2889 2865 except self.custom_exceptions:
2890 2866 etype,value,tb = sys.exc_info()
2891 2867 self.CustomTB(etype,value,tb)
2892 2868 except:
2893 2869 self.showtraceback()
2894 2870 else:
2895 2871 outflag = 0
2896 2872 return outflag
2897 2873
2898 2874 # For backwards compatibility
2899 2875 runcode = run_code
2900 2876
2901 2877 #-------------------------------------------------------------------------
2902 2878 # Things related to GUI support and pylab
2903 2879 #-------------------------------------------------------------------------
2904 2880
2905 2881 def enable_gui(self, gui=None):
2906 2882 raise NotImplementedError('Implement enable_gui in a subclass')
2907 2883
2908 2884 def enable_matplotlib(self, gui=None):
2909 2885 """Enable interactive matplotlib and inline figure support.
2910 2886
2911 2887 This takes the following steps:
2912 2888
2913 2889 1. select the appropriate eventloop and matplotlib backend
2914 2890 2. set up matplotlib for interactive use with that backend
2915 2891 3. configure formatters for inline figure display
2916 2892 4. enable the selected gui eventloop
2917 2893
2918 2894 Parameters
2919 2895 ----------
2920 2896 gui : optional, string
2921 2897 If given, dictates the choice of matplotlib GUI backend to use
2922 2898 (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
2923 2899 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
2924 2900 matplotlib (as dictated by the matplotlib build-time options plus the
2925 2901 user's matplotlibrc configuration file). Note that not all backends
2926 2902 make sense in all contexts, for example a terminal ipython can't
2927 2903 display figures inline.
2928 2904 """
2929 2905 from IPython.core import pylabtools as pt
2930 2906 gui, backend = pt.find_gui_and_backend(gui, self.pylab_gui_select)
2931 2907
2932 2908 if gui != 'inline':
2933 2909 # If we have our first gui selection, store it
2934 2910 if self.pylab_gui_select is None:
2935 2911 self.pylab_gui_select = gui
2936 2912 # Otherwise if they are different
2937 2913 elif gui != self.pylab_gui_select:
2938 2914 print ('Warning: Cannot change to a different GUI toolkit: %s.'
2939 2915 ' Using %s instead.' % (gui, self.pylab_gui_select))
2940 2916 gui, backend = pt.find_gui_and_backend(self.pylab_gui_select)
2941 2917
2942 2918 pt.activate_matplotlib(backend)
2943 2919 pt.configure_inline_support(self, backend)
2944 2920
2945 2921 # Now we must activate the gui pylab wants to use, and fix %run to take
2946 2922 # plot updates into account
2947 2923 self.enable_gui(gui)
2948 2924 self.magics_manager.registry['ExecutionMagics'].default_runner = \
2949 2925 pt.mpl_runner(self.safe_execfile)
2950 2926
2951 2927 return gui, backend
2952 2928
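A usage sketch for enable_matplotlib; it assumes a shell subclass that implements enable_gui and an environment where the requested backend is actually available:

    ip = get_ipython()
    gui, backend = ip.enable_matplotlib('qt')   # or pass None to accept matplotlib's default backend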
2953 2929 def enable_pylab(self, gui=None, import_all=True, welcome_message=False):
2954 2930 """Activate pylab support at runtime.
2955 2931
2956 2932 This turns on support for matplotlib, preloads into the interactive
2957 2933 namespace all of numpy and pylab, and configures IPython to correctly
2958 2934 interact with the GUI event loop. The GUI backend to be used can be
2959 2935 optionally selected with the optional ``gui`` argument.
2960 2936
2961 2937 This method only adds namespace preloading on top of InteractiveShell.enable_matplotlib.
2962 2938
2963 2939 Parameters
2964 2940 ----------
2965 2941 gui : optional, string
2966 2942 If given, dictates the choice of matplotlib GUI backend to use
2967 2943 (should be one of IPython's supported backends, 'qt', 'osx', 'tk',
2968 2944 'gtk', 'wx' or 'inline'), otherwise we use the default chosen by
2969 2945 matplotlib (as dictated by the matplotlib build-time options plus the
2970 2946 user's matplotlibrc configuration file). Note that not all backends
2971 2947 make sense in all contexts, for example a terminal ipython can't
2972 2948 display figures inline.
2973 2949 import_all : optional, bool, default: True
2974 2950 Whether to do `from numpy import *` and `from pylab import *`
2975 2951 in addition to module imports.
2976 2952 welcome_message : deprecated
2977 2953 This argument is ignored, no welcome message will be displayed.
2978 2954 """
2979 2955 from IPython.core.pylabtools import import_pylab
2980 2956
2981 2957 gui, backend = self.enable_matplotlib(gui)
2982 2958
2983 2959 # We want to prevent the loading of pylab from polluting the user's
2984 2960 # namespace as shown by the %who* magics, so we execute the activation
2985 2961 # code in an empty namespace, and we update *both* user_ns and
2986 2962 # user_ns_hidden with this information.
2987 2963 ns = {}
2988 2964 import_pylab(ns, import_all)
2989 2965 # warn about clobbered names
2990 2966 ignored = set(["__builtins__"])
2991 2967 both = set(ns).intersection(self.user_ns).difference(ignored)
2992 2968 clobbered = [ name for name in both if self.user_ns[name] is not ns[name] ]
2993 2969 self.user_ns.update(ns)
2994 2970 self.user_ns_hidden.update(ns)
2995 2971 return gui, backend, clobbered
2996 2972
2997 2973 #-------------------------------------------------------------------------
2998 2974 # Utilities
2999 2975 #-------------------------------------------------------------------------
3000 2976
3001 2977 def var_expand(self, cmd, depth=0, formatter=DollarFormatter()):
3002 2978 """Expand python variables in a string.
3003 2979
3004 2980 The depth argument indicates how many frames above the caller should
3005 2981 be walked to look for the local namespace where to expand variables.
3006 2982
3007 2983 The global namespace for expansion is always the user's interactive
3008 2984 namespace.
3009 2985 """
3010 2986 ns = self.user_ns.copy()
3011 2987 ns.update(sys._getframe(depth+1).f_locals)
3012 2988 try:
3013 2989 # We have to use .vformat() here, because 'self' is a valid and common
3014 2990 # name, and expanding **ns for .format() would make it collide with
3015 2991 # the 'self' argument of the method.
3016 2992 cmd = formatter.vformat(cmd, args=[], kwargs=ns)
3017 2993 except Exception:
3018 2994 # if formatter couldn't format, just let it go untransformed
3019 2995 pass
3020 2996 return cmd
3021 2997
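A short sketch of var_expand, matching the cases exercised by the tests later in this diff; both $name and {expression} forms are resolved against the user namespace, and strings that fail to format are returned unchanged:

    ip = get_ipython()
    ip.user_ns['f'] = 'value'
    ip.var_expand('echo $f')        # -> 'echo value'
    ip.var_expand('echo {f[:-1]}')  # -> 'echo valu'
    ip.var_expand('echo {1*2}')     # -> 'echo 2'
    ip.var_expand("{'a':5}")        # invalid format string, returned as-is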
3022 2998 def mktempfile(self, data=None, prefix='ipython_edit_'):
3023 2999 """Make a new tempfile and return its filename.
3024 3000
3025 3001 This makes a call to tempfile.mkstemp (in a directory created by tempfile.mkdtemp),
3026 3002 but it registers the created filename internally so ipython cleans it up
3027 3003 at exit time.
3028 3004
3029 3005 Optional inputs:
3030 3006
3031 3007 - data(None): if data is given, it gets written out to the temp file
3032 3008 immediately, and the file is closed again."""
3033 3009
3034 3010 dirname = tempfile.mkdtemp(prefix=prefix)
3035 3011 self.tempdirs.append(dirname)
3036 3012
3037 3013 handle, filename = tempfile.mkstemp('.py', prefix, dir=dirname)
3038 3014 self.tempfiles.append(filename)
3039 3015
3040 3016 if data:
3041 3017 tmp_file = open(filename,'w')
3042 3018 tmp_file.write(data)
3043 3019 tmp_file.close()
3044 3020 return filename
3045 3021
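A sketch of mktempfile in use (assuming an active session); the temporary file and its directory are tracked in tempfiles/tempdirs and removed by atexit_operations further down:

    ip = get_ipython()
    fname = ip.mktempfile(data='print("hello")\n')   # written and closed immediately
    # fname lives in a fresh ipython_edit_* directory and is cleaned up at exit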
3046 3022 # TODO: This should be removed when Term is refactored.
3047 3023 def write(self,data):
3048 3024 """Write a string to the default output"""
3049 3025 io.stdout.write(data)
3050 3026
3051 3027 # TODO: This should be removed when Term is refactored.
3052 3028 def write_err(self,data):
3053 3029 """Write a string to the default error output"""
3054 3030 io.stderr.write(data)
3055 3031
3056 3032 def ask_yes_no(self, prompt, default=None):
3057 3033 if self.quiet:
3058 3034 return True
3059 3035 return ask_yes_no(prompt,default)
3060 3036
3061 3037 def show_usage(self):
3062 3038 """Show a usage message"""
3063 3039 page.page(IPython.core.usage.interactive_usage)
3064 3040
3065 3041 def extract_input_lines(self, range_str, raw=False):
3066 3042 """Return as a string a set of input history slices.
3067 3043
3068 3044 Parameters
3069 3045 ----------
3070 3046 range_str : string
3071 3047 The set of slices is given as a string, like "~5/6-~4/2 4:8 9",
3072 3048 since this function is for use by magic functions which get their
3073 3049 arguments as strings. The number before the / is the session
3074 3050 number: ~n goes n back from the current session.
3075 3051
3076 3052 raw : bool, optional
3077 3053 By default, the processed input is used. If this is true, the raw
3078 3054 input history is used instead.
3079 3055
3080 3056 Notes
3081 3057 -----
3082 3058
3083 3059 Slices can be described with two notations:
3084 3060
3085 3061 * ``N:M`` -> standard python form, means including items N...(M-1).
3086 3062 * ``N-M`` -> include items N..M (closed endpoint).
3087 3063 """
3088 3064 lines = self.history_manager.get_range_by_str(range_str, raw=raw)
3089 3065 return "\n".join(x for _, _, x in lines)
3090 3066
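A sketch of the two slice notations described above (the returned text obviously depends on the session's history):

    ip = get_ipython()
    ip.extract_input_lines('1:4')               # processed inputs 1..3 of this session (half-open)
    ip.extract_input_lines('1-4')               # inputs 1..4 (closed endpoint)
    ip.extract_input_lines('~1/2-5', raw=True)  # raw inputs 2..5 from the previous session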
3091 3067 def find_user_code(self, target, raw=True, py_only=False, skip_encoding_cookie=True, search_ns=False):
3092 3068 """Get a code string from history, file, url, or a string or macro.
3093 3069
3094 3070 This is mainly used by magic functions.
3095 3071
3096 3072 Parameters
3097 3073 ----------
3098 3074
3099 3075 target : str
3100 3076
3101 3077 A string specifying code to retrieve. This will be tried respectively
3102 3078 as: ranges of input history (see %history for syntax), url,
3103 3079 corresponding .py file, filename, or an expression evaluating to a
3104 3080 string or Macro in the user namespace.
3105 3081
3106 3082 raw : bool
3107 3083 If true (default), retrieve raw history. Has no effect on the other
3108 3084 retrieval mechanisms.
3109 3085
3110 3086 py_only : bool (default False)
3111 3087 Only try to fetch python code, do not try alternative methods to decode file
3112 3088 if unicode fails.
3113 3089
3114 3090 Returns
3115 3091 -------
3116 3092 A string of code.
3117 3093
3118 3094 ValueError is raised if nothing is found, and TypeError if it evaluates
3119 3095 to an object of another type. In each case, .args[0] is a printable
3120 3096 message.
3121 3097 """
3122 3098 code = self.extract_input_lines(target, raw=raw) # Grab history
3123 3099 if code:
3124 3100 return code
3125 3101 utarget = unquote_filename(target)
3126 3102 try:
3127 3103 if utarget.startswith(('http://', 'https://')):
3128 3104 return openpy.read_py_url(utarget, skip_encoding_cookie=skip_encoding_cookie)
3129 3105 except UnicodeDecodeError:
3130 3106 if not py_only :
3131 3107 # Deferred import
3132 3108 try:
3133 3109 from urllib.request import urlopen # Py3
3134 3110 except ImportError:
3135 3111 from urllib import urlopen
3136 3112 response = urlopen(target)
3137 3113 return response.read().decode('latin1')
3138 3114 raise ValueError(("'%s' seems to be unreadable.") % utarget)
3139 3115
3140 3116 potential_target = [target]
3141 3117 try :
3142 3118 potential_target.insert(0,get_py_filename(target))
3143 3119 except IOError:
3144 3120 pass
3145 3121
3146 3122 for tgt in potential_target :
3147 3123 if os.path.isfile(tgt): # Read file
3148 3124 try :
3149 3125 return openpy.read_py_file(tgt, skip_encoding_cookie=skip_encoding_cookie)
3150 3126 except UnicodeDecodeError :
3151 3127 if not py_only :
3152 3128 with io_open(tgt,'r', encoding='latin1') as f :
3153 3129 return f.read()
3154 3130 raise ValueError(("'%s' seems to be unreadable.") % target)
3155 3131 elif os.path.isdir(os.path.expanduser(tgt)):
3156 3132 raise ValueError("'%s' is a directory, not a regular file." % target)
3157 3133
3158 3134 if search_ns:
3159 3135 # Inspect namespace to load object source
3160 3136 object_info = self.object_inspect(target, detail_level=1)
3161 3137 if object_info['found'] and object_info['source']:
3162 3138 return object_info['source']
3163 3139
3164 3140 try: # User namespace
3165 3141 codeobj = eval(target, self.user_ns)
3166 3142 except Exception:
3167 3143 raise ValueError(("'%s' was not found in history, as a file, url, "
3168 3144 "nor in the user namespace.") % target)
3169 3145
3170 3146 if isinstance(codeobj, string_types):
3171 3147 return codeobj
3172 3148 elif isinstance(codeobj, Macro):
3173 3149 return codeobj.value
3174 3150
3175 3151 raise TypeError("%s is neither a string nor a macro." % target,
3176 3152 codeobj)
3177 3153
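A sketch of the lookup order find_user_code walks through; the file name and variable below are hypothetical, used only to illustrate each branch:

    ip = get_ipython()
    ip.find_user_code('1-3')             # history range, tried first
    ip.find_user_code('some_script.py')  # hypothetical file on disk (raises ValueError if nothing matches)
    ip.user_ns['snippet'] = 'x = 1'
    ip.find_user_code('snippet')         # expression in the user namespace yielding a string or Macro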
3178 3154 #-------------------------------------------------------------------------
3179 3155 # Things related to IPython exiting
3180 3156 #-------------------------------------------------------------------------
3181 3157 def atexit_operations(self):
3182 3158 """This will be executed at the time of exit.
3183 3159
3184 3160 Cleanup operations and saving of persistent data that is done
3185 3161 unconditionally by IPython should be performed here.
3186 3162
3187 3163 For things that may depend on startup flags or platform specifics (such
3188 3164 as having readline or not), register a separate atexit function in the
3189 3165 code that has the appropriate information, rather than trying to
3190 3166 clutter this method.
3191 3167 """
3192 3168 # Close the history session (this stores the end time and line count)
3193 3169 # this must be *before* the tempfile cleanup, in case of temporary
3194 3170 # history db
3195 3171 self.history_manager.end_session()
3196 3172
3197 3173 # Cleanup all tempfiles and folders left around
3198 3174 for tfile in self.tempfiles:
3199 3175 try:
3200 3176 os.unlink(tfile)
3201 3177 except OSError:
3202 3178 pass
3203 3179
3204 3180 for tdir in self.tempdirs:
3205 3181 try:
3206 3182 os.rmdir(tdir)
3207 3183 except OSError:
3208 3184 pass
3209 3185
3210 3186 # Clear all user namespaces to release all references cleanly.
3211 3187 self.reset(new_session=False)
3212 3188
3213 3189 # Run user hooks
3214 3190 self.hooks.shutdown_hook()
3215 3191
3216 3192 def cleanup(self):
3217 3193 self.restore_sys_module_state()
3218 3194
3219 3195
3220 3196 class InteractiveShellABC(with_metaclass(abc.ABCMeta, object)):
3221 3197 """An abstract base class for InteractiveShell."""
3222 3198
3223 3199 InteractiveShellABC.register(InteractiveShell)
@@ -1,734 +1,721 b''
1 1 # -*- coding: utf-8 -*-
2 2 """Tests for the key interactiveshell module.
3 3
4 4 Historically the main classes in interactiveshell have been under-tested. This
5 5 module should grow as many single-method tests as possible to trap many of the
6 6 recurring bugs we seem to encounter with high-level interaction.
7
8 Authors
9 -------
10 * Fernando Perez
11 7 """
12 #-----------------------------------------------------------------------------
13 # Copyright (C) 2011 The IPython Development Team
14 #
15 # Distributed under the terms of the BSD License. The full license is in
16 # the file COPYING, distributed as part of this software.
17 #-----------------------------------------------------------------------------
18 8
19 #-----------------------------------------------------------------------------
20 # Imports
21 #-----------------------------------------------------------------------------
22 # stdlib
9 # Copyright (c) IPython Development Team.
10 # Distributed under the terms of the Modified BSD License.
11
23 12 import ast
24 13 import os
25 14 import signal
26 15 import shutil
27 16 import sys
28 17 import tempfile
29 18 import unittest
30 19 try:
31 20 from unittest import mock
32 21 except ImportError:
33 22 import mock
34 23 from os.path import join
35 24
36 # third-party
37 25 import nose.tools as nt
38 26
39 # Our own
40 27 from IPython.core.inputtransformer import InputTransformer
41 28 from IPython.testing.decorators import skipif, skip_win32, onlyif_unicode_paths
42 29 from IPython.testing import tools as tt
43 30 from IPython.utils import io
44 31 from IPython.utils import py3compat
45 32 from IPython.utils.py3compat import unicode_type, PY3
46 33
47 34 if PY3:
48 35 from io import StringIO
49 36 else:
50 37 from StringIO import StringIO
51 38
52 39 #-----------------------------------------------------------------------------
53 40 # Globals
54 41 #-----------------------------------------------------------------------------
55 42 # This is used by every single test, no point repeating it ad nauseam
56 43 ip = get_ipython()
57 44
58 45 #-----------------------------------------------------------------------------
59 46 # Tests
60 47 #-----------------------------------------------------------------------------
61 48
62 49 class InteractiveShellTestCase(unittest.TestCase):
63 50 def test_naked_string_cells(self):
64 51 """Test that cells with only naked strings are fully executed"""
65 52 # First, single-line inputs
66 53 ip.run_cell('"a"\n')
67 54 self.assertEqual(ip.user_ns['_'], 'a')
68 55 # And also multi-line cells
69 56 ip.run_cell('"""a\nb"""\n')
70 57 self.assertEqual(ip.user_ns['_'], 'a\nb')
71 58
72 59 def test_run_empty_cell(self):
73 60 """Just make sure we don't get a horrible error with a blank
74 61 cell of input. Yes, I did overlook that."""
75 62 old_xc = ip.execution_count
76 63 ip.run_cell('')
77 64 self.assertEqual(ip.execution_count, old_xc)
78 65
79 66 def test_run_cell_multiline(self):
80 67 """Multi-block, multi-line cells must execute correctly.
81 68 """
82 69 src = '\n'.join(["x=1",
83 70 "y=2",
84 71 "if 1:",
85 72 " x += 1",
86 73 " y += 1",])
87 74 ip.run_cell(src)
88 75 self.assertEqual(ip.user_ns['x'], 2)
89 76 self.assertEqual(ip.user_ns['y'], 3)
90 77
91 78 def test_multiline_string_cells(self):
92 79 "Code sprinkled with multiline strings should execute (GH-306)"
93 80 ip.run_cell('tmp=0')
94 81 self.assertEqual(ip.user_ns['tmp'], 0)
95 82 ip.run_cell('tmp=1;"""a\nb"""\n')
96 83 self.assertEqual(ip.user_ns['tmp'], 1)
97 84
98 85 def test_dont_cache_with_semicolon(self):
99 86 "Ending a line with semicolon should not cache the returned object (GH-307)"
100 87 oldlen = len(ip.user_ns['Out'])
101 88 for cell in ['1;', '1;1;']:
102 89 ip.run_cell(cell, store_history=True)
103 90 newlen = len(ip.user_ns['Out'])
104 91 self.assertEqual(oldlen, newlen)
105 92 i = 0
106 93 #also test the default caching behavior
107 94 for cell in ['1', '1;1']:
108 95 ip.run_cell(cell, store_history=True)
109 96 newlen = len(ip.user_ns['Out'])
110 97 i += 1
111 98 self.assertEqual(oldlen+i, newlen)
112 99
113 100 def test_In_variable(self):
114 101 "Verify that In variable grows with user input (GH-284)"
115 102 oldlen = len(ip.user_ns['In'])
116 103 ip.run_cell('1;', store_history=True)
117 104 newlen = len(ip.user_ns['In'])
118 105 self.assertEqual(oldlen+1, newlen)
119 106 self.assertEqual(ip.user_ns['In'][-1],'1;')
120 107
121 108 def test_magic_names_in_string(self):
122 109 ip.run_cell('a = """\n%exit\n"""')
123 110 self.assertEqual(ip.user_ns['a'], '\n%exit\n')
124 111
125 112 def test_trailing_newline(self):
126 113 """test that running !(command) does not raise a SyntaxError"""
127 114 ip.run_cell('!(true)\n', False)
128 115 ip.run_cell('!(true)\n\n\n', False)
129 116
130 117 def test_gh_597(self):
131 118 """Pretty-printing lists of objects with non-ascii reprs may cause
132 119 problems."""
133 120 class Spam(object):
134 121 def __repr__(self):
135 122 return "\xe9"*50
136 123 import IPython.core.formatters
137 124 f = IPython.core.formatters.PlainTextFormatter()
138 125 f([Spam(),Spam()])
139 126
140 127
141 128 def test_future_flags(self):
142 129 """Check that future flags are used for parsing code (gh-777)"""
143 130 ip.run_cell('from __future__ import print_function')
144 131 try:
145 132 ip.run_cell('prfunc_return_val = print(1,2, sep=" ")')
146 133 assert 'prfunc_return_val' in ip.user_ns
147 134 finally:
148 135 # Reset compiler flags so we don't mess up other tests.
149 136 ip.compile.reset_compiler_flags()
150 137
151 138 def test_future_unicode(self):
152 139 """Check that unicode_literals is imported from __future__ (gh #786)"""
153 140 try:
154 141 ip.run_cell(u'byte_str = "a"')
155 142 assert isinstance(ip.user_ns['byte_str'], str) # string literals are byte strings by default
156 143 ip.run_cell('from __future__ import unicode_literals')
157 144 ip.run_cell(u'unicode_str = "a"')
158 145 assert isinstance(ip.user_ns['unicode_str'], unicode_type) # string literals are now unicode
159 146 finally:
160 147 # Reset compiler flags so we don't mess up other tests.
161 148 ip.compile.reset_compiler_flags()
162 149
163 150 def test_can_pickle(self):
164 151 "Can we pickle objects defined interactively (GH-29)"
165 152 ip = get_ipython()
166 153 ip.reset()
167 154 ip.run_cell(("class Mylist(list):\n"
168 155 " def __init__(self,x=[]):\n"
169 156 " list.__init__(self,x)"))
170 157 ip.run_cell("w=Mylist([1,2,3])")
171 158
172 159 from pickle import dumps
173 160
174 161 # We need to swap in our main module - this is only necessary
175 162 # inside the test framework, because IPython puts the interactive module
176 163 # in place (but the test framework undoes this).
177 164 _main = sys.modules['__main__']
178 165 sys.modules['__main__'] = ip.user_module
179 166 try:
180 167 res = dumps(ip.user_ns["w"])
181 168 finally:
182 169 sys.modules['__main__'] = _main
183 170 self.assertTrue(isinstance(res, bytes))
184 171
185 172 def test_global_ns(self):
186 173 "Code in functions must be able to access variables outside them."
187 174 ip = get_ipython()
188 175 ip.run_cell("a = 10")
189 176 ip.run_cell(("def f(x):\n"
190 177 " return x + a"))
191 178 ip.run_cell("b = f(12)")
192 179 self.assertEqual(ip.user_ns["b"], 22)
193 180
194 181 def test_bad_custom_tb(self):
195 182 """Check that InteractiveShell is protected from bad custom exception handlers"""
196 183 from IPython.utils import io
197 184 save_stderr = io.stderr
198 185 try:
199 186 # capture stderr
200 187 io.stderr = StringIO()
201 188 ip.set_custom_exc((IOError,), lambda etype,value,tb: 1/0)
202 189 self.assertEqual(ip.custom_exceptions, (IOError,))
203 190 ip.run_cell(u'raise IOError("foo")')
204 191 self.assertEqual(ip.custom_exceptions, ())
205 192 self.assertTrue("Custom TB Handler failed" in io.stderr.getvalue())
206 193 finally:
207 194 io.stderr = save_stderr
208 195
209 196 def test_bad_custom_tb_return(self):
210 197 """Check that InteractiveShell is protected from bad return types in custom exception handlers"""
211 198 from IPython.utils import io
212 199 save_stderr = io.stderr
213 200 try:
214 201 # capture stderr
215 202 io.stderr = StringIO()
216 203 ip.set_custom_exc((NameError,),lambda etype,value,tb, tb_offset=None: 1)
217 204 self.assertEqual(ip.custom_exceptions, (NameError,))
218 205 ip.run_cell(u'a=abracadabra')
219 206 self.assertEqual(ip.custom_exceptions, ())
220 207 self.assertTrue("Custom TB Handler failed" in io.stderr.getvalue())
221 208 finally:
222 209 io.stderr = save_stderr
223 210
224 211 def test_drop_by_id(self):
225 212 myvars = {"a":object(), "b":object(), "c": object()}
226 213 ip.push(myvars, interactive=False)
227 214 for name in myvars:
228 215 assert name in ip.user_ns, name
229 216 assert name in ip.user_ns_hidden, name
230 217 ip.user_ns['b'] = 12
231 218 ip.drop_by_id(myvars)
232 219 for name in ["a", "c"]:
233 220 assert name not in ip.user_ns, name
234 221 assert name not in ip.user_ns_hidden, name
235 222 assert ip.user_ns['b'] == 12
236 223 ip.reset()
237 224
238 225 def test_var_expand(self):
239 226 ip.user_ns['f'] = u'Ca\xf1o'
240 227 self.assertEqual(ip.var_expand(u'echo $f'), u'echo Ca\xf1o')
241 228 self.assertEqual(ip.var_expand(u'echo {f}'), u'echo Ca\xf1o')
242 229 self.assertEqual(ip.var_expand(u'echo {f[:-1]}'), u'echo Ca\xf1')
243 230 self.assertEqual(ip.var_expand(u'echo {1*2}'), u'echo 2')
244 231
245 232 ip.user_ns['f'] = b'Ca\xc3\xb1o'
246 233 # This should not raise any exception:
247 234 ip.var_expand(u'echo $f')
248 235
249 236 def test_var_expand_local(self):
250 237 """Test local variable expansion in !system and %magic calls"""
251 238 # !system
252 239 ip.run_cell('def test():\n'
253 240 ' lvar = "ttt"\n'
254 241 ' ret = !echo {lvar}\n'
255 242 ' return ret[0]\n')
256 243 res = ip.user_ns['test']()
257 244 nt.assert_in('ttt', res)
258 245
259 246 # %magic
260 247 ip.run_cell('def makemacro():\n'
261 248 ' macroname = "macro_var_expand_locals"\n'
262 249 ' %macro {macroname} codestr\n')
263 250 ip.user_ns['codestr'] = "str(12)"
264 251 ip.run_cell('makemacro()')
265 252 nt.assert_in('macro_var_expand_locals', ip.user_ns)
266 253
267 254 def test_var_expand_self(self):
268 255 """Test variable expansion with the name 'self', which was failing.
269 256
270 257 See https://github.com/ipython/ipython/issues/1878#issuecomment-7698218
271 258 """
272 259 ip.run_cell('class cTest:\n'
273 260 ' classvar="see me"\n'
274 261 ' def test(self):\n'
275 262 ' res = !echo Variable: {self.classvar}\n'
276 263 ' return res[0]\n')
277 264 nt.assert_in('see me', ip.user_ns['cTest']().test())
278 265
279 266 def test_bad_var_expand(self):
280 267 """var_expand on invalid formats shouldn't raise"""
281 268 # SyntaxError
282 269 self.assertEqual(ip.var_expand(u"{'a':5}"), u"{'a':5}")
283 270 # NameError
284 271 self.assertEqual(ip.var_expand(u"{asdf}"), u"{asdf}")
285 272 # ZeroDivisionError
286 273 self.assertEqual(ip.var_expand(u"{1/0}"), u"{1/0}")
287 274
288 275 def test_silent_postexec(self):
289 276 """run_cell(silent=True) doesn't invoke pre/post_run_cell callbacks"""
290 277 pre_explicit = mock.Mock()
291 278 pre_always = mock.Mock()
292 279 post_explicit = mock.Mock()
293 280 post_always = mock.Mock()
294 281
295 282 ip.events.register('pre_run_cell', pre_explicit)
296 283 ip.events.register('pre_execute', pre_always)
297 284 ip.events.register('post_run_cell', post_explicit)
298 285 ip.events.register('post_execute', post_always)
299 286
300 287 try:
301 288 ip.run_cell("1", silent=True)
302 289 assert pre_always.called
303 290 assert not pre_explicit.called
304 291 assert post_always.called
305 292 assert not post_explicit.called
306 293 # double-check that non-silent exec did what we expected
307 294 # silent to avoid
308 295 ip.run_cell("1")
309 296 assert pre_explicit.called
310 297 assert post_explicit.called
311 298 finally:
312 299 # remove post-exec
313 300 ip.events.reset_all()
314 301
315 302 def test_silent_noadvance(self):
316 303 """run_cell(silent=True) doesn't advance execution_count"""
317 304 ec = ip.execution_count
318 305 # silent should force store_history=False
319 306 ip.run_cell("1", store_history=True, silent=True)
320 307
321 308 self.assertEqual(ec, ip.execution_count)
322 309 # double-check that non-silent exec did what we expected
323 310 # silent to avoid
324 311 ip.run_cell("1", store_history=True)
325 312 self.assertEqual(ec+1, ip.execution_count)
326 313
327 314 def test_silent_nodisplayhook(self):
328 315 """run_cell(silent=True) doesn't trigger displayhook"""
329 316 d = dict(called=False)
330 317
331 318 trap = ip.display_trap
332 319 save_hook = trap.hook
333 320
334 321 def failing_hook(*args, **kwargs):
335 322 d['called'] = True
336 323
337 324 try:
338 325 trap.hook = failing_hook
339 326 ip.run_cell("1", silent=True)
340 327 self.assertFalse(d['called'])
341 328 # double-check that non-silent exec did what we expected
342 329 # silent to avoid
343 330 ip.run_cell("1")
344 331 self.assertTrue(d['called'])
345 332 finally:
346 333 trap.hook = save_hook
347 334
348 335 @skipif(sys.version_info[0] >= 3, "softspace removed in py3")
349 336 def test_print_softspace(self):
350 337 """Verify that softspace is handled correctly when executing multiple
351 338 statements.
352 339
353 340 In [1]: print 1; print 2
354 341 1
355 342 2
356 343
357 344 In [2]: print 1,; print 2
358 345 1 2
359 346 """
360 347
361 348 def test_ofind_line_magic(self):
362 349 from IPython.core.magic import register_line_magic
363 350
364 351 @register_line_magic
365 352 def lmagic(line):
366 353 "A line magic"
367 354
368 355 # Get info on line magic
369 356 lfind = ip._ofind('lmagic')
370 357 info = dict(found=True, isalias=False, ismagic=True,
371 358 namespace = 'IPython internal', obj= lmagic.__wrapped__,
372 359 parent = None)
373 360 nt.assert_equal(lfind, info)
374 361
375 362 def test_ofind_cell_magic(self):
376 363 from IPython.core.magic import register_cell_magic
377 364
378 365 @register_cell_magic
379 366 def cmagic(line, cell):
380 367 "A cell magic"
381 368
382 369 # Get info on cell magic
383 370 find = ip._ofind('cmagic')
384 371 info = dict(found=True, isalias=False, ismagic=True,
385 372 namespace = 'IPython internal', obj= cmagic.__wrapped__,
386 373 parent = None)
387 374 nt.assert_equal(find, info)
388 375
389 376 def test_custom_exception(self):
390 377 called = []
391 378 def my_handler(shell, etype, value, tb, tb_offset=None):
392 379 called.append(etype)
393 380 shell.showtraceback((etype, value, tb), tb_offset=tb_offset)
394 381
395 382 ip.set_custom_exc((ValueError,), my_handler)
396 383 try:
397 384 ip.run_cell("raise ValueError('test')")
398 385 # Check that this was called, and only once.
399 386 self.assertEqual(called, [ValueError])
400 387 finally:
401 388 # Reset the custom exception hook
402 389 ip.set_custom_exc((), None)
403 390
404 391 @skipif(sys.version_info[0] >= 3, "no differences with __future__ in py3")
405 392 def test_future_environment(self):
406 393 "Can we run code with & without the shell's __future__ imports?"
407 394 ip.run_cell("from __future__ import division")
408 395 ip.run_cell("a = 1/2", shell_futures=True)
409 396 self.assertEqual(ip.user_ns['a'], 0.5)
410 397 ip.run_cell("b = 1/2", shell_futures=False)
411 398 self.assertEqual(ip.user_ns['b'], 0)
412 399
413 400 ip.compile.reset_compiler_flags()
414 401 # This shouldn't leak to the shell's compiler
415 402 ip.run_cell("from __future__ import division \nc=1/2", shell_futures=False)
416 403 self.assertEqual(ip.user_ns['c'], 0.5)
417 404 ip.run_cell("d = 1/2", shell_futures=True)
418 405 self.assertEqual(ip.user_ns['d'], 0)
419 406
420 407
421 408 class TestSafeExecfileNonAsciiPath(unittest.TestCase):
422 409
423 410 @onlyif_unicode_paths
424 411 def setUp(self):
425 412 self.BASETESTDIR = tempfile.mkdtemp()
426 413 self.TESTDIR = join(self.BASETESTDIR, u"Γ₯Àâ")
427 414 os.mkdir(self.TESTDIR)
428 415 with open(join(self.TESTDIR, u"Γ₯Àâtestscript.py"), "w") as sfile:
429 416 sfile.write("pass\n")
430 417 self.oldpath = py3compat.getcwd()
431 418 os.chdir(self.TESTDIR)
432 419 self.fname = u"Γ₯Àâtestscript.py"
433 420
434 421 def tearDown(self):
435 422 os.chdir(self.oldpath)
436 423 shutil.rmtree(self.BASETESTDIR)
437 424
438 425 @onlyif_unicode_paths
439 426 def test_1(self):
440 427 """Test safe_execfile with non-ascii path
441 428 """
442 429 ip.safe_execfile(self.fname, {}, raise_exceptions=True)
443 430
444 431 class ExitCodeChecks(tt.TempFileMixin):
445 432 def test_exit_code_ok(self):
446 433 self.system('exit 0')
447 434 self.assertEqual(ip.user_ns['_exit_code'], 0)
448 435
449 436 def test_exit_code_error(self):
450 437 self.system('exit 1')
451 438 self.assertEqual(ip.user_ns['_exit_code'], 1)
452 439
453 440 @skipif(not hasattr(signal, 'SIGALRM'))
454 441 def test_exit_code_signal(self):
455 442 self.mktmp("import signal, time\n"
456 443 "signal.setitimer(signal.ITIMER_REAL, 0.1)\n"
457 444 "time.sleep(1)\n")
458 445 self.system("%s %s" % (sys.executable, self.fname))
459 446 self.assertEqual(ip.user_ns['_exit_code'], -signal.SIGALRM)
460 447
461 448 class TestSystemRaw(unittest.TestCase, ExitCodeChecks):
462 449 system = ip.system_raw
463 450
464 451 @onlyif_unicode_paths
465 452 def test_1(self):
466 453 """Test system_raw with non-ascii cmd
467 454 """
468 455 cmd = u'''python -c "'Γ₯Àâ'" '''
469 456 ip.system_raw(cmd)
470 457
471 458 # TODO: Exit codes are currently ignored on Windows.
472 459 class TestSystemPipedExitCode(unittest.TestCase, ExitCodeChecks):
473 460 system = ip.system_piped
474 461
475 462 @skip_win32
476 463 def test_exit_code_ok(self):
477 464 ExitCodeChecks.test_exit_code_ok(self)
478 465
479 466 @skip_win32
480 467 def test_exit_code_error(self):
481 468 ExitCodeChecks.test_exit_code_error(self)
482 469
483 470 @skip_win32
484 471 def test_exit_code_signal(self):
485 472 ExitCodeChecks.test_exit_code_signal(self)
486 473
487 474 class TestModules(unittest.TestCase, tt.TempFileMixin):
488 475 def test_extraneous_loads(self):
489 476 """Test we're not loading modules on startup that we shouldn't.
490 477 """
491 478 self.mktmp("import sys\n"
492 479 "print('numpy' in sys.modules)\n"
493 480 "print('IPython.parallel' in sys.modules)\n"
494 481 "print('IPython.kernel.zmq' in sys.modules)\n"
495 482 )
496 483 out = "False\nFalse\nFalse\n"
497 484 tt.ipexec_validate(self.fname, out)
498 485
499 486 class Negator(ast.NodeTransformer):
500 487 """Negates all number literals in an AST."""
501 488 def visit_Num(self, node):
502 489 node.n = -node.n
503 490 return node
504 491
505 492 class TestAstTransform(unittest.TestCase):
506 493 def setUp(self):
507 494 self.negator = Negator()
508 495 ip.ast_transformers.append(self.negator)
509 496
510 497 def tearDown(self):
511 498 ip.ast_transformers.remove(self.negator)
512 499
513 500 def test_run_cell(self):
514 501 with tt.AssertPrints('-34'):
515 502 ip.run_cell('print (12 + 22)')
516 503
517 504 # A named reference to a number shouldn't be transformed.
518 505 ip.user_ns['n'] = 55
519 506 with tt.AssertNotPrints('-55'):
520 507 ip.run_cell('print (n)')
521 508
522 509 def test_timeit(self):
523 510 called = set()
524 511 def f(x):
525 512 called.add(x)
526 513 ip.push({'f':f})
527 514
528 515 with tt.AssertPrints("best of "):
529 516 ip.run_line_magic("timeit", "-n1 f(1)")
530 517 self.assertEqual(called, set([-1]))
531 518 called.clear()
532 519
533 520 with tt.AssertPrints("best of "):
534 521 ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)")
535 522 self.assertEqual(called, set([-2, -3]))
536 523
537 524 def test_time(self):
538 525 called = []
539 526 def f(x):
540 527 called.append(x)
541 528 ip.push({'f':f})
542 529
543 530 # Test with an expression
544 531 with tt.AssertPrints("Wall time: "):
545 532 ip.run_line_magic("time", "f(5+9)")
546 533 self.assertEqual(called, [-14])
547 534 called[:] = []
548 535
549 536 # Test with a statement (different code path)
550 537 with tt.AssertPrints("Wall time: "):
551 538 ip.run_line_magic("time", "a = f(-3 + -2)")
552 539 self.assertEqual(called, [5])
553 540
554 541 def test_macro(self):
555 542 ip.push({'a':10})
556 543 # The AST transformation makes this do a+=-1
557 544 ip.define_macro("amacro", "a+=1\nprint(a)")
558 545
559 546 with tt.AssertPrints("9"):
560 547 ip.run_cell("amacro")
561 548 with tt.AssertPrints("8"):
562 549 ip.run_cell("amacro")
563 550
564 551 class IntegerWrapper(ast.NodeTransformer):
565 552 """Wraps all integers in a call to Integer()"""
566 553 def visit_Num(self, node):
567 554 if isinstance(node.n, int):
568 555 return ast.Call(func=ast.Name(id='Integer', ctx=ast.Load()),
569 556 args=[node], keywords=[])
570 557 return node
571 558
572 559 class TestAstTransform2(unittest.TestCase):
573 560 def setUp(self):
574 561 self.intwrapper = IntegerWrapper()
575 562 ip.ast_transformers.append(self.intwrapper)
576 563
577 564 self.calls = []
578 565 def Integer(*args):
579 566 self.calls.append(args)
580 567 return args
581 568 ip.push({"Integer": Integer})
582 569
583 570 def tearDown(self):
584 571 ip.ast_transformers.remove(self.intwrapper)
585 572 del ip.user_ns['Integer']
586 573
587 574 def test_run_cell(self):
588 575 ip.run_cell("n = 2")
589 576 self.assertEqual(self.calls, [(2,)])
590 577
591 578 # This shouldn't throw an error
592 579 ip.run_cell("o = 2.0")
593 580 self.assertEqual(ip.user_ns['o'], 2.0)
594 581
595 582 def test_timeit(self):
596 583 called = set()
597 584 def f(x):
598 585 called.add(x)
599 586 ip.push({'f':f})
600 587
601 588 with tt.AssertPrints("best of "):
602 589 ip.run_line_magic("timeit", "-n1 f(1)")
603 590 self.assertEqual(called, set([(1,)]))
604 591 called.clear()
605 592
606 593 with tt.AssertPrints("best of "):
607 594 ip.run_cell_magic("timeit", "-n1 f(2)", "f(3)")
608 595 self.assertEqual(called, set([(2,), (3,)]))
609 596
610 597 class ErrorTransformer(ast.NodeTransformer):
611 598 """Throws an error when it sees a number."""
612 599 def visit_Num(self, node):
613 600 raise ValueError("test")
614 601
615 602 class TestAstTransformError(unittest.TestCase):
616 603 def test_unregistering(self):
617 604 err_transformer = ErrorTransformer()
618 605 ip.ast_transformers.append(err_transformer)
619 606
620 607 with tt.AssertPrints("unregister", channel='stderr'):
621 608 ip.run_cell("1 + 2")
622 609
623 610 # This should have been removed.
624 611 nt.assert_not_in(err_transformer, ip.ast_transformers)
625 612
626 613 def test__IPYTHON__():
627 614 # This shouldn't raise a NameError, that's all
628 615 __IPYTHON__
629 616
630 617
631 618 class DummyRepr(object):
632 619 def __repr__(self):
633 620 return "DummyRepr"
634 621
635 622 def _repr_html_(self):
636 623 return "<b>dummy</b>"
637 624
638 625 def _repr_javascript_(self):
639 626 return "console.log('hi');", {'key': 'value'}
640 627
641 628
642 629 def test_user_variables():
643 630 # enable all formatters
644 631 ip.display_formatter.active_types = ip.display_formatter.format_types
645 632
646 633 ip.user_ns['dummy'] = d = DummyRepr()
647 634 keys = set(['dummy', 'doesnotexist'])
648 r = ip.user_variables(keys)
635 r = ip.user_expressions({ key:key for key in keys})
649 636
650 637 nt.assert_equal(keys, set(r.keys()))
651 638 dummy = r['dummy']
652 639 nt.assert_equal(set(['status', 'data', 'metadata']), set(dummy.keys()))
653 640 nt.assert_equal(dummy['status'], 'ok')
654 641 data = dummy['data']
655 642 metadata = dummy['metadata']
656 643 nt.assert_equal(data.get('text/html'), d._repr_html_())
657 644 js, jsmd = d._repr_javascript_()
658 645 nt.assert_equal(data.get('application/javascript'), js)
659 646 nt.assert_equal(metadata.get('application/javascript'), jsmd)
660 647
661 648 dne = r['doesnotexist']
662 649 nt.assert_equal(dne['status'], 'error')
663 nt.assert_equal(dne['ename'], 'KeyError')
650 nt.assert_equal(dne['ename'], 'NameError')
664 651
665 652 # back to text only
666 653 ip.display_formatter.active_types = ['text/plain']
667 654
668 655 def test_user_expression():
669 656 # enable all formatters
670 657 ip.display_formatter.active_types = ip.display_formatter.format_types
671 658 query = {
672 659 'a' : '1 + 2',
673 660 'b' : '1/0',
674 661 }
675 662 r = ip.user_expressions(query)
676 663 import pprint
677 664 pprint.pprint(r)
678 665 nt.assert_equal(set(r.keys()), set(query.keys()))
679 666 a = r['a']
680 667 nt.assert_equal(set(['status', 'data', 'metadata']), set(a.keys()))
681 668 nt.assert_equal(a['status'], 'ok')
682 669 data = a['data']
683 670 metadata = a['metadata']
684 671 nt.assert_equal(data.get('text/plain'), '3')
685 672
686 673 b = r['b']
687 674 nt.assert_equal(b['status'], 'error')
688 675 nt.assert_equal(b['ename'], 'ZeroDivisionError')
689 676
690 677 # back to text only
691 678 ip.display_formatter.active_types = ['text/plain']
692 679
693 680
694 681
695 682
696 683
697 684 class TestSyntaxErrorTransformer(unittest.TestCase):
698 685 """Check that SyntaxError raised by an input transformer is handled by run_cell()"""
699 686
700 687 class SyntaxErrorTransformer(InputTransformer):
701 688
702 689 def push(self, line):
703 690 pos = line.find('syntaxerror')
704 691 if pos >= 0:
705 692 e = SyntaxError('input contains "syntaxerror"')
706 693 e.text = line
707 694 e.offset = pos + 1
708 695 raise e
709 696 return line
710 697
711 698 def reset(self):
712 699 pass
713 700
714 701 def setUp(self):
715 702 self.transformer = TestSyntaxErrorTransformer.SyntaxErrorTransformer()
716 703 ip.input_splitter.python_line_transforms.append(self.transformer)
717 704 ip.input_transformer_manager.python_line_transforms.append(self.transformer)
718 705
719 706 def tearDown(self):
720 707 ip.input_splitter.python_line_transforms.remove(self.transformer)
721 708 ip.input_transformer_manager.python_line_transforms.remove(self.transformer)
722 709
723 710 def test_syntaxerror_input_transformer(self):
724 711 with tt.AssertPrints('1234'):
725 712 ip.run_cell('1234')
726 713 with tt.AssertPrints('SyntaxError: invalid syntax'):
727 714 ip.run_cell('1 2 3') # plain python syntax error
728 715 with tt.AssertPrints('SyntaxError: input contains "syntaxerror"'):
729 716 ip.run_cell('2345 # syntaxerror') # input transformer syntax error
730 717 with tt.AssertPrints('3456'):
731 718 ip.run_cell('3456')
732 719
733 720
734 721
@@ -1,624 +1,621 b''
1 1 // Copyright (c) IPython Development Team.
2 2 // Distributed under the terms of the Modified BSD License.
3 3
4 4 //============================================================================
5 5 // Kernel
6 6 //============================================================================
7 7
8 8 /**
9 9 * @module IPython
10 10 * @namespace IPython
11 11 * @submodule Kernel
12 12 */
13 13
14 14 var IPython = (function (IPython) {
15 15 "use strict";
16 16
17 17 var utils = IPython.utils;
18 18
19 19 // Initialization and connection.
20 20 /**
21 21 * A Kernel Class to communicate with the Python kernel
22 22 * @Class Kernel
23 23 */
24 24 var Kernel = function (kernel_service_url) {
25 25 this.kernel_id = null;
26 26 this.shell_channel = null;
27 27 this.iopub_channel = null;
28 28 this.stdin_channel = null;
29 29 this.kernel_service_url = kernel_service_url;
30 30 this.running = false;
31 31 this.username = "username";
32 32 this.session_id = utils.uuid();
33 33 this._msg_callbacks = {};
34 34 this.post = $.post;
35 35
36 36 if (typeof(WebSocket) !== 'undefined') {
37 37 this.WebSocket = WebSocket;
38 38 } else if (typeof(MozWebSocket) !== 'undefined') {
39 39 this.WebSocket = MozWebSocket;
40 40 } else {
41 41 alert('Your browser does not have WebSocket support, please try Chrome, Safari or Firefox β‰₯ 6. Firefox 4 and 5 are also supported but you have to enable WebSockets in about:config.');
42 42 }
43 43
44 44 this.bind_events();
45 45 this.init_iopub_handlers();
46 46 this.comm_manager = new IPython.CommManager(this);
47 47 this.widget_manager = new IPython.WidgetManager(this.comm_manager);
48 48
49 49 this.last_msg_id = null;
50 50 this.last_msg_callbacks = {};
51 51 };
52 52
53 53
54 54 Kernel.prototype._get_msg = function (msg_type, content, metadata) {
55 55 var msg = {
56 56 header : {
57 57 msg_id : utils.uuid(),
58 58 username : this.username,
59 59 session : this.session_id,
60 60 msg_type : msg_type
61 61 },
62 62 metadata : metadata || {},
63 63 content : content,
64 64 parent_header : {}
65 65 };
66 66 return msg;
67 67 };
68 68
69 69 Kernel.prototype.bind_events = function () {
70 70 var that = this;
71 71 $([IPython.events]).on('send_input_reply.Kernel', function(evt, data) {
72 72 that.send_input_reply(data);
73 73 });
74 74 };
75 75
76 76 // Initialize the iopub handlers
77 77
78 78 Kernel.prototype.init_iopub_handlers = function () {
79 79 var output_msg_types = ['stream', 'display_data', 'execute_result', 'error'];
80 80 this._iopub_handlers = {};
81 81 this.register_iopub_handler('status', $.proxy(this._handle_status_message, this));
82 82 this.register_iopub_handler('clear_output', $.proxy(this._handle_clear_output, this));
83 83
84 84 for (var i=0; i < output_msg_types.length; i++) {
85 85 this.register_iopub_handler(output_msg_types[i], $.proxy(this._handle_output_message, this));
86 86 }
87 87 };
88 88
89 89 /**
90 90 * Start the Python kernel
91 91 * @method start
92 92 */
93 93 Kernel.prototype.start = function (params) {
94 94 params = params || {};
95 95 if (!this.running) {
96 96 var qs = $.param(params);
97 97 this.post(utils.url_join_encode(this.kernel_service_url) + '?' + qs,
98 98 $.proxy(this._kernel_started, this),
99 99 'json'
100 100 );
101 101 }
102 102 };
103 103
104 104 /**
105 105 * Restart the python kernel.
106 106 *
107 107 * Emit a 'status_restarting.Kernel' event with
108 108 * the current object as parameter
109 109 *
110 110 * @method restart
111 111 */
112 112 Kernel.prototype.restart = function () {
113 113 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
114 114 if (this.running) {
115 115 this.stop_channels();
116 116 this.post(utils.url_join_encode(this.kernel_url, "restart"),
117 117 $.proxy(this._kernel_started, this),
118 118 'json'
119 119 );
120 120 }
121 121 };
122 122
123 123
124 124 Kernel.prototype._kernel_started = function (json) {
125 125 console.log("Kernel started: ", json.id);
126 126 this.running = true;
127 127 this.kernel_id = json.id;
128 128 // trailing 's' in https will become wss for secure web sockets
129 129 this.ws_host = location.protocol.replace('http', 'ws') + "//" + location.host;
130 130 this.kernel_url = utils.url_path_join(this.kernel_service_url, this.kernel_id);
131 131 this.start_channels();
132 132 };
133 133
134 134
135 135 Kernel.prototype._websocket_closed = function(ws_url, early) {
136 136 this.stop_channels();
137 137 $([IPython.events]).trigger('websocket_closed.Kernel',
138 138 {ws_url: ws_url, kernel: this, early: early}
139 139 );
140 140 };
141 141
142 142 /**
143 143 * Start the `shell` and `iopub` channels.
144 144 * Will stop and restart them if they already exist.
145 145 *
146 146 * @method start_channels
147 147 */
148 148 Kernel.prototype.start_channels = function () {
149 149 var that = this;
150 150 this.stop_channels();
151 151 var ws_host_url = this.ws_host + this.kernel_url;
152 152 console.log("Starting WebSockets:", ws_host_url);
153 153 this.shell_channel = new this.WebSocket(
154 154 this.ws_host + utils.url_join_encode(this.kernel_url, "shell")
155 155 );
156 156 this.stdin_channel = new this.WebSocket(
157 157 this.ws_host + utils.url_join_encode(this.kernel_url, "stdin")
158 158 );
159 159 this.iopub_channel = new this.WebSocket(
160 160 this.ws_host + utils.url_join_encode(this.kernel_url, "iopub")
161 161 );
162 162
163 163 var already_called_onclose = false; // only alert once
164 164 var ws_closed_early = function(evt){
165 165 if (already_called_onclose){
166 166 return;
167 167 }
168 168 already_called_onclose = true;
169 169 if ( ! evt.wasClean ){
170 170 that._websocket_closed(ws_host_url, true);
171 171 }
172 172 };
173 173 var ws_closed_late = function(evt){
174 174 if (already_called_onclose){
175 175 return;
176 176 }
177 177 already_called_onclose = true;
178 178 if ( ! evt.wasClean ){
179 179 that._websocket_closed(ws_host_url, false);
180 180 }
181 181 };
182 182 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
183 183 for (var i=0; i < channels.length; i++) {
184 184 channels[i].onopen = $.proxy(this._ws_opened, this);
185 185 channels[i].onclose = ws_closed_early;
186 186 }
187 187 // switch from early-close to late-close message after 1s
188 188 setTimeout(function() {
189 189 for (var i=0; i < channels.length; i++) {
190 190 if (channels[i] !== null) {
191 191 channels[i].onclose = ws_closed_late;
192 192 }
193 193 }
194 194 }, 1000);
195 195 this.shell_channel.onmessage = $.proxy(this._handle_shell_reply, this);
196 196 this.iopub_channel.onmessage = $.proxy(this._handle_iopub_message, this);
197 197 this.stdin_channel.onmessage = $.proxy(this._handle_input_request, this);
198 198 };
199 199
200 200 /**
201 201 * Handle a websocket entering the open state
202 202 * sends session and cookie authentication info as first message.
203 203 * Once all sockets are open, signal the Kernel.status_started event.
204 204 * @method _ws_opened
205 205 */
206 206 Kernel.prototype._ws_opened = function (evt) {
207 207 // send the session id so the Session object Python-side
208 208 // has the same identity
209 209 evt.target.send(this.session_id + ':' + document.cookie);
210 210
211 211 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
212 212 for (var i=0; i < channels.length; i++) {
213 213 // if any channel is not ready, don't trigger event.
214 214 if ( !channels[i].readyState ) return;
215 215 }
216 216 // all events ready, trigger started event.
217 217 $([IPython.events]).trigger('status_started.Kernel', {kernel: this});
218 218 };
219 219
220 220 /**
221 221 * Stop the websocket channels.
222 222 * @method stop_channels
223 223 */
224 224 Kernel.prototype.stop_channels = function () {
225 225 var channels = [this.shell_channel, this.iopub_channel, this.stdin_channel];
226 226 for (var i=0; i < channels.length; i++) {
227 227 if ( channels[i] !== null ) {
228 228 channels[i].onclose = null;
229 229 channels[i].close();
230 230 }
231 231 }
232 232 this.shell_channel = this.iopub_channel = this.stdin_channel = null;
233 233 };
234 234
235 235 // Main public methods.
236 236
237 237 // send a message on the Kernel's shell channel
238 238 Kernel.prototype.send_shell_message = function (msg_type, content, callbacks, metadata) {
239 239 var msg = this._get_msg(msg_type, content, metadata);
240 240 this.shell_channel.send(JSON.stringify(msg));
241 241 this.set_callbacks_for_msg(msg.header.msg_id, callbacks);
242 242 return msg.header.msg_id;
243 243 };
244 244
245 245 /**
246 246 * Get kernel info
247 247 *
248 248 * @param callback {function}
250 250 * @method kernel_info
250 250 *
251 251 * When calling this method, pass a callback function that expects one argument.
252 252 * The callback will be passed the complete `kernel_info_reply` message documented
253 253 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#kernel-info)
254 254 */
255 255 Kernel.prototype.kernel_info = function (callback) {
256 256 var callbacks;
257 257 if (callback) {
258 258 callbacks = { shell : { reply : callback } };
259 259 }
260 260 return this.send_shell_message("kernel_info_request", {}, callbacks);
261 261 };
262 262
263 263 /**
264 264 * Get info on an object
265 265 *
266 266 * @param objname {string}
267 267 * @param callback {function}
268 268 * @method object_info
269 269 *
270 270 * When calling this method, pass a callback function that expects one argument.
271 271 * The callback will be passed the complete `object_info_reply` message documented
272 272 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#object-information)
273 273 */
274 274 Kernel.prototype.object_info = function (objname, callback) {
275 275 var callbacks;
276 276 if (callback) {
277 277 callbacks = { shell : { reply : callback } };
278 278 }
279 279
280 280 if (typeof objname !== 'undefined' && objname !== null) {
281 281 var content = {
282 282 oname : objname.toString(),
283 283 detail_level : 0,
284 284 };
285 285 return this.send_shell_message("object_info_request", content, callbacks);
286 286 }
287 287 return;
288 288 };
289 289
290 290 /**
291 291 * Execute given code into kernel, and pass result to callback.
292 292 *
293 293 * @async
294 294 * @method execute
295 295 * @param {string} code
296 296 * @param [callbacks] {Object} With the following keys (all optional)
297 297 * @param callbacks.shell.reply {function}
298 298 * @param callbacks.shell.payload.[payload_name] {function}
299 299 * @param callbacks.iopub.output {function}
300 300 * @param callbacks.iopub.clear_output {function}
301 301 * @param callbacks.input {function}
302 302 * @param {object} [options]
303 303 * @param [options.silent=true] {Boolean}
304 304 * @param [options.user_expressions=empty_dict] {Dict}
305 * @param [options.user_variables=empty_list] {List od Strings}
306 305 * @param [options.allow_stdin=false] {Boolean} true|false
307 306 *
308 307 * @example
309 308 *
310 309 * The options object should contain the options for the execute call. Its default
311 310 * values are:
312 311 *
313 312 * options = {
314 313 * silent : true,
315 * user_variables : [],
316 314 * user_expressions : {},
317 315 * allow_stdin : false
318 316 * }
319 317 *
320 318 * When calling this method pass a callbacks structure of the form:
321 319 *
322 320 * callbacks = {
323 321 * shell : {
324 322 * reply : execute_reply_callback,
325 323 * payload : {
326 324 * set_next_input : set_next_input_callback,
327 325 * }
328 326 * },
329 327 * iopub : {
330 328 * output : output_callback,
331 329 * clear_output : clear_output_callback,
332 330 * },
333 331 * input : raw_input_callback
334 332 * }
335 333 *
336 334 * Each callback will be passed the entire message as a single argument.
337 335 * Payload handlers will be passed the corresponding payload and the execute_reply message.
338 336 */
339 337 Kernel.prototype.execute = function (code, callbacks, options) {
340 338
341 339 var content = {
342 340 code : code,
343 341 silent : true,
344 342 store_history : false,
345 user_variables : [],
346 343 user_expressions : {},
347 344 allow_stdin : false
348 345 };
349 346 callbacks = callbacks || {};
350 347 if (callbacks.input !== undefined) {
351 348 content.allow_stdin = true;
352 349 }
353 350 $.extend(true, content, options);
354 351 $([IPython.events]).trigger('execution_request.Kernel', {kernel: this, content:content});
355 352 return this.send_shell_message("execute_request", content, callbacks);
356 353 };
357 354
358 355 /**
359 356 * When calling this method, pass a function to be called with the `complete_reply` message
360 357 * as its only argument when it arrives.
361 358 *
362 359 * `complete_reply` is documented
363 360 * [here](http://ipython.org/ipython-doc/dev/development/messaging.html#complete)
364 361 *
365 362 * @method complete
366 363 * @param line {string}
367 364 * @param cursor_pos {integer}
368 365 * @param callback {function}
369 366 *
370 367 */
371 368 Kernel.prototype.complete = function (line, cursor_pos, callback) {
372 369 var callbacks;
373 370 if (callback) {
374 371 callbacks = { shell : { reply : callback } };
375 372 }
376 373 var content = {
377 374 text : '',
378 375 line : line,
379 376 block : null,
380 377 cursor_pos : cursor_pos
381 378 };
382 379 return this.send_shell_message("complete_request", content, callbacks);
383 380 };
384 381
385 382
386 383 Kernel.prototype.interrupt = function () {
387 384 if (this.running) {
388 385 $([IPython.events]).trigger('status_interrupting.Kernel', {kernel: this});
389 386 this.post(utils.url_join_encode(this.kernel_url, "interrupt"));
390 387 }
391 388 };
392 389
393 390
394 391 Kernel.prototype.kill = function () {
395 392 if (this.running) {
396 393 this.running = false;
397 394 var settings = {
398 395 cache : false,
399 396 type : "DELETE",
400 397 error : utils.log_ajax_error,
401 398 };
402 399 $.ajax(utils.url_join_encode(this.kernel_url), settings);
403 400 }
404 401 };
405 402
406 403 Kernel.prototype.send_input_reply = function (input) {
407 404 var content = {
408 405 value : input,
409 406 };
410 407 $([IPython.events]).trigger('input_reply.Kernel', {kernel: this, content:content});
411 408 var msg = this._get_msg("input_reply", content);
412 409 this.stdin_channel.send(JSON.stringify(msg));
413 410 return msg.header.msg_id;
414 411 };
415 412
416 413
417 414 // Reply handlers
418 415
419 416 Kernel.prototype.register_iopub_handler = function (msg_type, callback) {
420 417 this._iopub_handlers[msg_type] = callback;
421 418 };
422 419
423 420 Kernel.prototype.get_iopub_handler = function (msg_type) {
424 421 // get iopub handler for a specific message type
425 422 return this._iopub_handlers[msg_type];
426 423 };
427 424
428 425
429 426 Kernel.prototype.get_callbacks_for_msg = function (msg_id) {
430 427 // get callbacks for a specific message
431 428 if (msg_id == this.last_msg_id) {
432 429 return this.last_msg_callbacks;
433 430 } else {
434 431 return this._msg_callbacks[msg_id];
435 432 }
436 433 };
437 434
438 435
439 436 Kernel.prototype.clear_callbacks_for_msg = function (msg_id) {
440 437 if (this._msg_callbacks[msg_id] !== undefined ) {
441 438 delete this._msg_callbacks[msg_id];
442 439 }
443 440 };
444 441
445 442 Kernel.prototype._finish_shell = function (msg_id) {
446 443 var callbacks = this._msg_callbacks[msg_id];
447 444 if (callbacks !== undefined) {
448 445 callbacks.shell_done = true;
449 446 if (callbacks.iopub_done) {
450 447 this.clear_callbacks_for_msg(msg_id);
451 448 }
452 449 }
453 450 };
454 451
455 452 Kernel.prototype._finish_iopub = function (msg_id) {
456 453 var callbacks = this._msg_callbacks[msg_id];
457 454 if (callbacks !== undefined) {
458 455 callbacks.iopub_done = true;
459 456 if (!callbacks.shell_done) {
460 457 this.clear_callbacks_for_msg(msg_id);
461 458 }
462 459 }
463 460 };
464 461
465 462 /* Set callbacks for a particular message.
466 463 * Callbacks should be a struct of the following form:
467 464 * shell : {
468 465 *
469 466 * }
470 467
471 468 */
472 469 Kernel.prototype.set_callbacks_for_msg = function (msg_id, callbacks) {
473 470 this.last_msg_id = msg_id;
474 471 if (callbacks) {
475 472 // shallow-copy mapping, because we will modify it at the top level
476 473 var cbcopy = this._msg_callbacks[msg_id] = this.last_msg_callbacks = {};
477 474 cbcopy.shell = callbacks.shell;
478 475 cbcopy.iopub = callbacks.iopub;
479 476 cbcopy.input = callbacks.input;
480 477 cbcopy.shell_done = (!callbacks.shell);
481 478 cbcopy.iopub_done = (!callbacks.iopub);
482 479 } else {
483 480 this.last_msg_callbacks = {};
484 481 }
485 482 };
486 483
487 484
488 485 Kernel.prototype._handle_shell_reply = function (e) {
489 486 var reply = $.parseJSON(e.data);
490 487 $([IPython.events]).trigger('shell_reply.Kernel', {kernel: this, reply:reply});
491 488 var content = reply.content;
492 489 var metadata = reply.metadata;
493 490 var parent_id = reply.parent_header.msg_id;
494 491 var callbacks = this.get_callbacks_for_msg(parent_id);
495 492 if (!callbacks || !callbacks.shell) {
496 493 return;
497 494 }
498 495 var shell_callbacks = callbacks.shell;
499 496
500 497 // signal that shell callbacks are done
501 498 this._finish_shell(parent_id);
502 499
503 500 if (shell_callbacks.reply !== undefined) {
504 501 shell_callbacks.reply(reply);
505 502 }
506 503 if (content.payload && shell_callbacks.payload) {
507 504 this._handle_payloads(content.payload, shell_callbacks.payload, reply);
508 505 }
509 506 };
510 507
511 508
512 509 Kernel.prototype._handle_payloads = function (payloads, payload_callbacks, msg) {
513 510 var l = payloads.length;
514 511 // Payloads are handled by triggering events because we don't want the Kernel
515 512 // to depend on the Notebook or Pager classes.
516 513 for (var i=0; i<l; i++) {
517 514 var payload = payloads[i];
518 515 var callback = payload_callbacks[payload.source];
519 516 if (callback) {
520 517 callback(payload, msg);
521 518 }
522 519 }
523 520 };
524 521
525 522 Kernel.prototype._handle_status_message = function (msg) {
526 523 var execution_state = msg.content.execution_state;
527 524 var parent_id = msg.parent_header.msg_id;
528 525
529 526 // dispatch status msg callbacks, if any
530 527 var callbacks = this.get_callbacks_for_msg(parent_id);
531 528 if (callbacks && callbacks.iopub && callbacks.iopub.status) {
532 529 try {
533 530 callbacks.iopub.status(msg);
534 531 } catch (e) {
535 532 console.log("Exception in status msg handler", e, e.stack);
536 533 }
537 534 }
538 535
539 536 if (execution_state === 'busy') {
540 537 $([IPython.events]).trigger('status_busy.Kernel', {kernel: this});
541 538 } else if (execution_state === 'idle') {
542 539 // signal that iopub callbacks are (probably) done
543 540 // async output may still arrive,
544 541 // but only for the most recent request
545 542 this._finish_iopub(parent_id);
546 543
547 544 // trigger status_idle event
548 545 $([IPython.events]).trigger('status_idle.Kernel', {kernel: this});
549 546 } else if (execution_state === 'restarting') {
550 547 // autorestarting is distinct from restarting,
551 548 // in that it means the kernel died and the server is restarting it.
552 549 // status_restarting sets the notification widget,
553 550 // autorestart shows the more prominent dialog.
554 551 $([IPython.events]).trigger('status_autorestarting.Kernel', {kernel: this});
555 552 $([IPython.events]).trigger('status_restarting.Kernel', {kernel: this});
556 553 } else if (execution_state === 'dead') {
557 554 this.stop_channels();
558 555 $([IPython.events]).trigger('status_dead.Kernel', {kernel: this});
559 556 }
560 557 };
561 558
562 559
563 560 // handle clear_output message
564 561 Kernel.prototype._handle_clear_output = function (msg) {
565 562 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
566 563 if (!callbacks || !callbacks.iopub) {
567 564 return;
568 565 }
569 566 var callback = callbacks.iopub.clear_output;
570 567 if (callback) {
571 568 callback(msg);
572 569 }
573 570 };
574 571
575 572
576 573 // handle an output message (execute_result, display_data, etc.)
577 574 Kernel.prototype._handle_output_message = function (msg) {
578 575 var callbacks = this.get_callbacks_for_msg(msg.parent_header.msg_id);
579 576 if (!callbacks || !callbacks.iopub) {
580 577 return;
581 578 }
582 579 var callback = callbacks.iopub.output;
583 580 if (callback) {
584 581 callback(msg);
585 582 }
586 583 };
587 584
588 585 // dispatch IOPub messages to respective handlers.
589 586 // each message type should have a handler.
590 587 Kernel.prototype._handle_iopub_message = function (e) {
591 588 var msg = $.parseJSON(e.data);
592 589
593 590 var handler = this.get_iopub_handler(msg.header.msg_type);
594 591 if (handler !== undefined) {
595 592 handler(msg);
596 593 }
597 594 };
598 595
599 596
600 597 Kernel.prototype._handle_input_request = function (e) {
601 598 var request = $.parseJSON(e.data);
602 599 var header = request.header;
603 600 var content = request.content;
604 601 var metadata = request.metadata;
605 602 var msg_type = header.msg_type;
606 603 if (msg_type !== 'input_request') {
607 604 console.log("Invalid input request!", request);
608 605 return;
609 606 }
610 607 var callbacks = this.get_callbacks_for_msg(request.parent_header.msg_id);
611 608 if (callbacks) {
612 609 if (callbacks.input) {
613 610 callbacks.input(request);
614 611 }
615 612 }
616 613 };
617 614
618 615
619 616 IPython.Kernel = Kernel;
620 617
621 618 return IPython;
622 619
623 620 }(IPython));
624 621
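For reference, a minimal Python sketch of the execute_request content that Kernel.prototype.execute above now builds (illustrative only, not part of the changeset; the code string and the expression are made up):

    import json

    # Mirrors the defaults set in Kernel.prototype.execute: user_variables is
    # gone, and anything a frontend wants evaluated goes in user_expressions.
    content = {
        "code": "x = 1",                      # hypothetical code string
        "silent": True,
        "store_history": False,
        "user_expressions": {"y": "x + 1"},   # hypothetical expression to evaluate
        "allow_stdin": False,
    }
    print(json.dumps(content))                # the content portion of the JSON message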
@@ -1,637 +1,618 b''
1 """Base classes to manage a Client's interaction with a running kernel
2 """
1 """Base classes to manage a Client's interaction with a running kernel"""
3 2
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2013 The IPython Development Team
6 #
7 # Distributed under the terms of the BSD License. The full license is in
8 # the file COPYING, distributed as part of this software.
9 #-----------------------------------------------------------------------------
10
11 #-----------------------------------------------------------------------------
12 # Imports
13 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
14 5
15 6 from __future__ import absolute_import
16 7
17 # Standard library imports
18 8 import atexit
19 9 import errno
20 10 from threading import Thread
21 11 import time
22 12
23 13 import zmq
24 14 # import ZMQError in top-level namespace, to avoid ugly attribute-error messages
25 15 # during garbage collection of threads at exit:
26 16 from zmq import ZMQError
27 17 from zmq.eventloop import ioloop, zmqstream
28 18
29 19 # Local imports
30 20 from .channelsabc import (
31 21 ShellChannelABC, IOPubChannelABC,
32 22 HBChannelABC, StdInChannelABC,
33 23 )
34 24 from IPython.utils.py3compat import string_types, iteritems
35 25
36 26 #-----------------------------------------------------------------------------
37 27 # Constants and exceptions
38 28 #-----------------------------------------------------------------------------
39 29
40 30 class InvalidPortNumber(Exception):
41 31 pass
42 32
43 33 #-----------------------------------------------------------------------------
44 34 # Utility functions
45 35 #-----------------------------------------------------------------------------
46 36
47 37 # some utilities to validate message structure, these might get moved elsewhere
48 38 # if they prove to have more generic utility
49 39
50 40 def validate_string_list(lst):
51 41 """Validate that the input is a list of strings.
52 42
53 43 Raises ValueError if not."""
54 44 if not isinstance(lst, list):
55 45 raise ValueError('input %r must be a list' % lst)
56 46 for x in lst:
57 47 if not isinstance(x, string_types):
58 48 raise ValueError('element %r in list must be a string' % x)
59 49
60 50
61 51 def validate_string_dict(dct):
62 52 """Validate that the input is a dict with string keys and values.
63 53
64 54 Raises ValueError if not."""
65 55 for k,v in iteritems(dct):
66 56 if not isinstance(k, string_types):
67 57 raise ValueError('key %r in dict must be a string' % k)
68 58 if not isinstance(v, string_types):
69 59 raise ValueError('value %r in dict must be a string' % v)
70 60
71 61
72 62 #-----------------------------------------------------------------------------
73 63 # ZMQ Socket Channel classes
74 64 #-----------------------------------------------------------------------------
75 65
76 66 class ZMQSocketChannel(Thread):
77 67 """The base class for the channels that use ZMQ sockets."""
78 68 context = None
79 69 session = None
80 70 socket = None
81 71 ioloop = None
82 72 stream = None
83 73 _address = None
84 74 _exiting = False
85 75 proxy_methods = []
86 76
87 77 def __init__(self, context, session, address):
88 78 """Create a channel.
89 79
90 80 Parameters
91 81 ----------
92 82 context : :class:`zmq.Context`
93 83 The ZMQ context to use.
94 84 session : :class:`session.Session`
95 85 The session to use.
96 86 address : zmq url
97 87 The zmq url, or standard (ip, port) tuple, that the kernel is listening on.
98 88 """
99 89 super(ZMQSocketChannel, self).__init__()
100 90 self.daemon = True
101 91
102 92 self.context = context
103 93 self.session = session
104 94 if isinstance(address, tuple):
105 95 if address[1] == 0:
106 96 message = 'The port number for a channel cannot be 0.'
107 97 raise InvalidPortNumber(message)
108 98 address = "tcp://%s:%i" % address
109 99 self._address = address
110 100 atexit.register(self._notice_exit)
111 101
112 102 def _notice_exit(self):
113 103 self._exiting = True
114 104
115 105 def _run_loop(self):
116 106 """Run my loop, ignoring EINTR events in the poller"""
117 107 while True:
118 108 try:
119 109 self.ioloop.start()
120 110 except ZMQError as e:
121 111 if e.errno == errno.EINTR:
122 112 continue
123 113 else:
124 114 raise
125 115 except Exception:
126 116 if self._exiting:
127 117 break
128 118 else:
129 119 raise
130 120 else:
131 121 break
132 122
133 123 def stop(self):
134 124 """Stop the channel's event loop and join its thread.
135 125
136 126 This calls :meth:`~threading.Thread.join` and returns when the thread
137 127 terminates. :class:`RuntimeError` will be raised if
138 128 :meth:`~threading.Thread.start` is called again.
139 129 """
140 130 if self.ioloop is not None:
141 131 self.ioloop.stop()
142 132 self.join()
143 133 self.close()
144 134
145 135 def close(self):
146 136 if self.ioloop is not None:
147 137 try:
148 138 self.ioloop.close(all_fds=True)
149 139 except Exception:
150 140 pass
151 141 if self.socket is not None:
152 142 try:
153 143 self.socket.close(linger=0)
154 144 except Exception:
155 145 pass
156 146 self.socket = None
157 147
158 148 @property
159 149 def address(self):
160 150 """Get the channel's address as a zmq url string.
161 151
162 152 These URLs have the form: 'tcp://127.0.0.1:5555'.
163 153 """
164 154 return self._address
165 155
166 156 def _queue_send(self, msg):
167 157 """Queue a message to be sent from the IOLoop's thread.
168 158
169 159 Parameters
170 160 ----------
171 161 msg : message to send
172 162
173 163 This is threadsafe, as it uses IOLoop.add_callback to give the loop's
174 164 thread control of the action.
175 165 """
176 166 def thread_send():
177 167 self.session.send(self.stream, msg)
178 168 self.ioloop.add_callback(thread_send)
179 169
180 170 def _handle_recv(self, msg):
181 171 """Callback for stream.on_recv.
182 172
183 173 Unpacks message, and calls handlers with it.
184 174 """
185 175 ident,smsg = self.session.feed_identities(msg)
186 176 self.call_handlers(self.session.unserialize(smsg))
187 177
188 178
189 179
190 180 class ShellChannel(ZMQSocketChannel):
191 181 """The shell channel for issuing request/replies to the kernel."""
192 182
193 183 command_queue = None
194 184 # flag for whether execute requests should be allowed to call raw_input:
195 185 allow_stdin = True
196 186 proxy_methods = [
197 187 'execute',
198 188 'complete',
199 189 'object_info',
200 190 'history',
201 191 'kernel_info',
202 192 'shutdown',
203 193 ]
204 194
205 195 def __init__(self, context, session, address):
206 196 super(ShellChannel, self).__init__(context, session, address)
207 197 self.ioloop = ioloop.IOLoop()
208 198
209 199 def run(self):
210 200 """The thread's main activity. Call start() instead."""
211 201 self.socket = self.context.socket(zmq.DEALER)
212 202 self.socket.linger = 1000
213 203 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
214 204 self.socket.connect(self.address)
215 205 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
216 206 self.stream.on_recv(self._handle_recv)
217 207 self._run_loop()
218 208
219 209 def call_handlers(self, msg):
220 210 """This method is called in the ioloop thread when a message arrives.
221 211
222 212 Subclasses should override this method to handle incoming messages.
223 213 It is important to remember that this method is called in the thread
224 214 so that some logic must be done to ensure that the application level
225 215 handlers are called in the application thread.
226 216 """
227 217 raise NotImplementedError('call_handlers must be defined in a subclass.')
228 218
229 219 def execute(self, code, silent=False, store_history=True,
230 user_variables=None, user_expressions=None, allow_stdin=None):
220 user_expressions=None, allow_stdin=None):
231 221 """Execute code in the kernel.
232 222
233 223 Parameters
234 224 ----------
235 225 code : str
236 226 A string of Python code.
237 227
238 228 silent : bool, optional (default False)
239 229 If set, the kernel will execute the code as quietly as possible, and
240 230 will force store_history to be False.
241 231
242 232 store_history : bool, optional (default True)
243 233 If set, the kernel will store command history. This is forced
244 234 to be False if silent is True.
245 235
246 user_variables : list, optional
247 A list of variable names to pull from the user's namespace. They
248 will come back as a dict with these names as keys and their
249 :func:`repr` as values.
250
251 236 user_expressions : dict, optional
252 237 A dict mapping names to expressions to be evaluated in the user's
253 238 namespace. The expression values are returned as strings formatted using
254 239 :func:`repr`.
255 240
256 241 allow_stdin : bool, optional (default self.allow_stdin)
257 242 Flag for whether the kernel can send stdin requests to frontends.
258 243
259 244 Some frontends (e.g. the Notebook) do not support stdin requests.
260 245 If raw_input is called from code executed from such a frontend, a
261 246 StdinNotImplementedError will be raised.
262 247
263 248 Returns
264 249 -------
265 250 The msg_id of the message sent.
266 251 """
267 if user_variables is None:
268 user_variables = []
269 252 if user_expressions is None:
270 253 user_expressions = {}
271 254 if allow_stdin is None:
272 255 allow_stdin = self.allow_stdin
273 256
274 257
275 258 # Don't waste network traffic if inputs are invalid
276 259 if not isinstance(code, string_types):
277 260 raise ValueError('code %r must be a string' % code)
278 validate_string_list(user_variables)
279 261 validate_string_dict(user_expressions)
280 262
281 263 # Create class for content/msg creation. Related to, but possibly
282 264 # not in Session.
283 265 content = dict(code=code, silent=silent, store_history=store_history,
284 user_variables=user_variables,
285 266 user_expressions=user_expressions,
286 267 allow_stdin=allow_stdin,
287 268 )
288 269 msg = self.session.msg('execute_request', content)
289 270 self._queue_send(msg)
290 271 return msg['header']['msg_id']
291 272
292 273 def complete(self, text, line, cursor_pos, block=None):
293 274 """Tab complete text in the kernel's namespace.
294 275
295 276 Parameters
296 277 ----------
297 278 text : str
298 279 The text to complete.
299 280 line : str
300 281 The full line of text that is the surrounding context for the
301 282 text to complete.
302 283 cursor_pos : int
303 284 The position of the cursor in the line where the completion was
304 285 requested.
305 286 block : str, optional
306 287 The full block of code in which the completion is being requested.
307 288
308 289 Returns
309 290 -------
310 291 The msg_id of the message sent.
311 292 """
312 293 content = dict(text=text, line=line, block=block, cursor_pos=cursor_pos)
313 294 msg = self.session.msg('complete_request', content)
314 295 self._queue_send(msg)
315 296 return msg['header']['msg_id']
316 297
317 298 def object_info(self, oname, detail_level=0):
318 299 """Get metadata information about an object in the kernel's namespace.
319 300
320 301 Parameters
321 302 ----------
322 303 oname : str
323 304 A string specifying the object name.
324 305 detail_level : int, optional
325 306 The level of detail for the introspection (0-2)
326 307
327 308 Returns
328 309 -------
329 310 The msg_id of the message sent.
330 311 """
331 312 content = dict(oname=oname, detail_level=detail_level)
332 313 msg = self.session.msg('object_info_request', content)
333 314 self._queue_send(msg)
334 315 return msg['header']['msg_id']
335 316
336 317 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
337 318 """Get entries from the kernel's history list.
338 319
339 320 Parameters
340 321 ----------
341 322 raw : bool
342 323 If True, return the raw input.
343 324 output : bool
344 325 If True, then return the output as well.
345 326 hist_access_type : str
346 327 'range' (fill in session, start and stop params), 'tail' (fill in n)
347 328 or 'search' (fill in pattern param).
348 329
349 330 session : int
350 331 For a range request, the session from which to get lines. Session
351 332 numbers are positive integers; negative ones count back from the
352 333 current session.
353 334 start : int
354 335 The first line number of a history range.
355 336 stop : int
356 337 The final (excluded) line number of a history range.
357 338
358 339 n : int
359 340 The number of lines of history to get for a tail request.
360 341
361 342 pattern : str
362 343 The glob-syntax pattern for a search request.
363 344
364 345 Returns
365 346 -------
366 347 The msg_id of the message sent.
367 348 """
368 349 content = dict(raw=raw, output=output, hist_access_type=hist_access_type,
369 350 **kwargs)
370 351 msg = self.session.msg('history_request', content)
371 352 self._queue_send(msg)
372 353 return msg['header']['msg_id']
373 354
374 355 def kernel_info(self):
375 356 """Request kernel info."""
376 357 msg = self.session.msg('kernel_info_request')
377 358 self._queue_send(msg)
378 359 return msg['header']['msg_id']
379 360
380 361 def shutdown(self, restart=False):
381 362 """Request an immediate kernel shutdown.
382 363
383 364 Upon receipt of the (empty) reply, client code can safely assume that
384 365 the kernel has shut down and it's safe to forcefully terminate it if
385 366 it's still alive.
386 367
387 368 The kernel will send the reply via a function registered with Python's
388 369 atexit module, ensuring it's truly done as the kernel is done with all
389 370 normal operation.
390 371 """
391 372 # Send quit message to kernel. Once we implement kernel-side setattr,
392 373 # this should probably be done that way, but for now this will do.
393 374 msg = self.session.msg('shutdown_request', {'restart':restart})
394 375 self._queue_send(msg)
395 376 return msg['header']['msg_id']
396 377
397 378
398 379
399 380 class IOPubChannel(ZMQSocketChannel):
400 381 """The iopub channel which listens for messages that the kernel publishes.
401 382
402 383 This channel is where all output is published to frontends.
403 384 """
404 385
405 386 def __init__(self, context, session, address):
406 387 super(IOPubChannel, self).__init__(context, session, address)
407 388 self.ioloop = ioloop.IOLoop()
408 389
409 390 def run(self):
410 391 """The thread's main activity. Call start() instead."""
411 392 self.socket = self.context.socket(zmq.SUB)
412 393 self.socket.linger = 1000
413 394 self.socket.setsockopt(zmq.SUBSCRIBE,b'')
414 395 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
415 396 self.socket.connect(self.address)
416 397 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
417 398 self.stream.on_recv(self._handle_recv)
418 399 self._run_loop()
419 400
420 401 def call_handlers(self, msg):
421 402 """This method is called in the ioloop thread when a message arrives.
422 403
423 404 Subclasses should override this method to handle incoming messages.
424 405 It is important to remember that this method is called in the ioloop thread,
425 406 so some logic must be done to ensure that the application level
426 407 handlers are called in the application thread.
427 408 """
428 409 raise NotImplementedError('call_handlers must be defined in a subclass.')
429 410
430 411 def flush(self, timeout=1.0):
431 412 """Immediately processes all pending messages on the iopub channel.
432 413
433 414 Callers should use this method to ensure that :meth:`call_handlers`
434 415 has been called for all messages that have been received on the
435 416 0MQ SUB socket of this channel.
436 417
437 418 This method is thread safe.
438 419
439 420 Parameters
440 421 ----------
441 422 timeout : float, optional
442 423 The maximum amount of time to spend flushing, in seconds. The
443 424 default is one second.
444 425 """
445 426 # We do the IOLoop callback process twice to ensure that the IOLoop
446 427 # gets to perform at least one full poll.
447 428 stop_time = time.time() + timeout
448 429 for i in range(2):
449 430 self._flushed = False
450 431 self.ioloop.add_callback(self._flush)
451 432 while not self._flushed and time.time() < stop_time:
452 433 time.sleep(0.01)
453 434
454 435 def _flush(self):
455 436 """Callback for :method:`self.flush`."""
456 437 self.stream.flush()
457 438 self._flushed = True
458 439
459 440
460 441 class StdInChannel(ZMQSocketChannel):
461 442 """The stdin channel to handle raw_input requests that the kernel makes."""
462 443
463 444 msg_queue = None
464 445 proxy_methods = ['input']
465 446
466 447 def __init__(self, context, session, address):
467 448 super(StdInChannel, self).__init__(context, session, address)
468 449 self.ioloop = ioloop.IOLoop()
469 450
470 451 def run(self):
471 452 """The thread's main activity. Call start() instead."""
472 453 self.socket = self.context.socket(zmq.DEALER)
473 454 self.socket.linger = 1000
474 455 self.socket.setsockopt(zmq.IDENTITY, self.session.bsession)
475 456 self.socket.connect(self.address)
476 457 self.stream = zmqstream.ZMQStream(self.socket, self.ioloop)
477 458 self.stream.on_recv(self._handle_recv)
478 459 self._run_loop()
479 460
480 461 def call_handlers(self, msg):
481 462 """This method is called in the ioloop thread when a message arrives.
482 463
483 464 Subclasses should override this method to handle incoming messages.
484 465 It is important to remember that this method is called in the ioloop thread,
485 466 so some logic must be done to ensure that the application level
486 467 handlers are called in the application thread.
487 468 """
488 469 raise NotImplementedError('call_handlers must be defined in a subclass.')
489 470
490 471 def input(self, string):
491 472 """Send a string of raw input to the kernel."""
492 473 content = dict(value=string)
493 474 msg = self.session.msg('input_reply', content)
494 475 self._queue_send(msg)
495 476
496 477
497 478 class HBChannel(ZMQSocketChannel):
498 479 """The heartbeat channel which monitors the kernel heartbeat.
499 480
500 481 Note that the heartbeat channel is paused by default. As long as you start
501 482 this channel, the kernel manager will ensure that it is paused and un-paused
502 483 as appropriate.
503 484 """
504 485
505 486 time_to_dead = 3.0
506 487 socket = None
507 488 poller = None
508 489 _running = None
509 490 _pause = None
510 491 _beating = None
511 492
512 493 def __init__(self, context, session, address):
513 494 super(HBChannel, self).__init__(context, session, address)
514 495 self._running = False
515 496 self._pause = True
516 497 self.poller = zmq.Poller()
517 498
518 499 def _create_socket(self):
519 500 if self.socket is not None:
520 501 # close previous socket, before opening a new one
521 502 self.poller.unregister(self.socket)
522 503 self.socket.close()
523 504 self.socket = self.context.socket(zmq.REQ)
524 505 self.socket.linger = 1000
525 506 self.socket.connect(self.address)
526 507
527 508 self.poller.register(self.socket, zmq.POLLIN)
528 509
529 510 def _poll(self, start_time):
530 511 """poll for heartbeat replies until we reach self.time_to_dead.
531 512
532 513 Ignores interrupts, and returns the result of poll(), which
533 514 will be an empty list if no messages arrived before the timeout,
534 515 or the event tuple if there is a message to receive.
535 516 """
536 517
537 518 until_dead = self.time_to_dead - (time.time() - start_time)
538 519 # ensure poll at least once
539 520 until_dead = max(until_dead, 1e-3)
540 521 events = []
541 522 while True:
542 523 try:
543 524 events = self.poller.poll(1000 * until_dead)
544 525 except ZMQError as e:
545 526 if e.errno == errno.EINTR:
546 527 # ignore interrupts during heartbeat
547 528 # this may never actually happen
548 529 until_dead = self.time_to_dead - (time.time() - start_time)
549 530 until_dead = max(until_dead, 1e-3)
550 531 pass
551 532 else:
552 533 raise
553 534 except Exception:
554 535 if self._exiting:
555 536 break
556 537 else:
557 538 raise
558 539 else:
559 540 break
560 541 return events
561 542
562 543 def run(self):
563 544 """The thread's main activity. Call start() instead."""
564 545 self._create_socket()
565 546 self._running = True
566 547 self._beating = True
567 548
568 549 while self._running:
569 550 if self._pause:
570 551 # just sleep, and skip the rest of the loop
571 552 time.sleep(self.time_to_dead)
572 553 continue
573 554
574 555 since_last_heartbeat = 0.0
575 556 # io.rprint('Ping from HB channel') # dbg
576 557 # no need to catch EFSM here, because the previous event was
577 558 # either a recv or connect, which cannot be followed by EFSM
578 559 self.socket.send(b'ping')
579 560 request_time = time.time()
580 561 ready = self._poll(request_time)
581 562 if ready:
582 563 self._beating = True
583 564 # the poll above guarantees we have something to recv
584 565 self.socket.recv()
585 566 # sleep the remainder of the cycle
586 567 remainder = self.time_to_dead - (time.time() - request_time)
587 568 if remainder > 0:
588 569 time.sleep(remainder)
589 570 continue
590 571 else:
591 572 # nothing was received within the time limit, signal heart failure
592 573 self._beating = False
593 574 since_last_heartbeat = time.time() - request_time
594 575 self.call_handlers(since_last_heartbeat)
595 576 # and close/reopen the socket, because the REQ/REP cycle has been broken
596 577 self._create_socket()
597 578 continue
598 579
599 580 def pause(self):
600 581 """Pause the heartbeat."""
601 582 self._pause = True
602 583
603 584 def unpause(self):
604 585 """Unpause the heartbeat."""
605 586 self._pause = False
606 587
607 588 def is_beating(self):
608 589 """Is the heartbeat running and responsive (and not paused)."""
609 590 if self.is_alive() and not self._pause and self._beating:
610 591 return True
611 592 else:
612 593 return False
613 594
614 595 def stop(self):
615 596 """Stop the channel's event loop and join its thread."""
616 597 self._running = False
617 598 super(HBChannel, self).stop()
618 599
619 600 def call_handlers(self, since_last_heartbeat):
620 601 """This method is called in the ioloop thread when a message arrives.
621 602
622 603 Subclasses should override this method to handle incoming messages.
623 604 It is important to remember that this method is called in the channel's thread,
624 605 so some logic must be done to ensure that the application level
625 606 handlers are called in the application thread.
626 607 """
627 608 raise NotImplementedError('call_handlers must be defined in a subclass.')
628 609
629 610
630 611 #-----------------------------------------------------------------------------
631 612 # ABC Registration
632 613 #-----------------------------------------------------------------------------
633 614
634 615 ShellChannelABC.register(ShellChannel)
635 616 IOPubChannelABC.register(IOPubChannel)
636 617 HBChannelABC.register(HBChannel)
637 618 StdInChannelABC.register(StdInChannel)
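Every channel's call_handlers raises NotImplementedError, so a concrete client subclasses the channels above. A minimal sketch under that assumption (the class name and queue attribute are hypothetical): messages received in the ioloop thread are handed to the application thread through a queue.

    try:
        from queue import Queue   # Python 3
    except ImportError:
        from Queue import Queue   # Python 2

    class QueueingShellChannel(ShellChannel):
        """Shell channel that enqueues replies for the application thread."""

        def __init__(self, context, session, address):
            super(QueueingShellChannel, self).__init__(context, session, address)
            self.msg_queue = Queue()

        def call_handlers(self, msg):
            # runs in the channel's ioloop thread: don't process here, just enqueue
            self.msg_queue.put(msg)

    # The application thread start()s the channel, calls execute()/complete()/...,
    # and drains msg_queue.get(timeout=...) to pick up the replies.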
@@ -1,117 +1,113 b''
1 1 """Abstract base classes for kernel client channels"""
2 2
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2013 The IPython Development Team
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
9 5
10 6 import abc
11 7
12 8 from IPython.utils.py3compat import with_metaclass
13 9
14 10
15 11 class ChannelABC(with_metaclass(abc.ABCMeta, object)):
16 12 """A base class for all channel ABCs."""
17 13
18 14 @abc.abstractmethod
19 15 def start(self):
20 16 pass
21 17
22 18 @abc.abstractmethod
23 19 def stop(self):
24 20 pass
25 21
26 22 @abc.abstractmethod
27 23 def is_alive(self):
28 24 pass
29 25
30 26
31 27 class ShellChannelABC(ChannelABC):
32 28 """ShellChannel ABC.
33 29
34 30 The docstrings for this class can be found in the base implementation:
35 31
36 32 `IPython.kernel.channels.ShellChannel`
37 33 """
38 34
39 35 @abc.abstractproperty
40 36 def allow_stdin(self):
41 37 pass
42 38
43 39 @abc.abstractmethod
44 40 def execute(self, code, silent=False, store_history=True,
45 user_variables=None, user_expressions=None, allow_stdin=None):
41 user_expressions=None, allow_stdin=None):
46 42 pass
47 43
48 44 @abc.abstractmethod
49 45 def complete(self, text, line, cursor_pos, block=None):
50 46 pass
51 47
52 48 @abc.abstractmethod
53 49 def object_info(self, oname, detail_level=0):
54 50 pass
55 51
56 52 @abc.abstractmethod
57 53 def history(self, raw=True, output=False, hist_access_type='range', **kwargs):
58 54 pass
59 55
60 56 @abc.abstractmethod
61 57 def kernel_info(self):
62 58 pass
63 59
64 60 @abc.abstractmethod
65 61 def shutdown(self, restart=False):
66 62 pass
67 63
68 64
69 65 class IOPubChannelABC(ChannelABC):
70 66 """IOPubChannel ABC.
71 67
72 68 The docstrings for this class can be found in the base implementation:
73 69
74 70 `IPython.kernel.channels.IOPubChannel`
75 71 """
76 72
77 73 @abc.abstractmethod
78 74 def flush(self, timeout=1.0):
79 75 pass
80 76
81 77
82 78 class StdInChannelABC(ChannelABC):
83 79 """StdInChannel ABC.
84 80
85 81 The docstrings for this class can be found in the base implementation:
86 82
87 83 `IPython.kernel.channels.StdInChannel`
88 84 """
89 85
90 86 @abc.abstractmethod
91 87 def input(self, string):
92 88 pass
93 89
94 90
95 91 class HBChannelABC(ChannelABC):
96 92 """HBChannel ABC.
97 93
98 94 The docstrings for this class can be found in the base implementation:
99 95
100 96 `IPython.kernel.channels.HBChannel`
101 97 """
102 98
103 99 @abc.abstractproperty
104 100 def time_to_dead(self):
105 101 pass
106 102
107 103 @abc.abstractmethod
108 104 def pause(self):
109 105 pass
110 106
111 107 @abc.abstractmethod
112 108 def unpause(self):
113 109 pass
114 110
115 111 @abc.abstractmethod
116 112 def is_beating(self):
117 113 pass
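These ABCs are intended for virtual subclassing via register(), as channels.py does above. A hypothetical sketch showing that an unrelated class passes issubclass/isinstance checks once registered (note that register() does not verify the abstract methods are actually implemented):

    class FakeHBChannel(object):
        """Hypothetical stand-in, used only to illustrate ABC registration."""
        time_to_dead = 3.0

        def start(self): pass
        def stop(self): pass
        def is_alive(self): return False
        def pause(self): pass
        def unpause(self): pass
        def is_beating(self): return False

    HBChannelABC.register(FakeHBChannel)
    assert issubclass(FakeHBChannel, HBChannelABC)
    assert isinstance(FakeHBChannel(), HBChannelABC)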
@@ -1,201 +1,190 b''
1 1 """ A kernel client for in-process kernels. """
2 2
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2012 The IPython Development Team
5 #
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
8 #-----------------------------------------------------------------------------
9
10 #-----------------------------------------------------------------------------
11 # Imports
12 #-----------------------------------------------------------------------------
3 # Copyright (c) IPython Development Team.
4 # Distributed under the terms of the Modified BSD License.
13 5
14 # IPython imports
15 6 from IPython.kernel.channelsabc import (
16 7 ShellChannelABC, IOPubChannelABC,
17 8 HBChannelABC, StdInChannelABC,
18 9 )
19 10
20 # Local imports
21 11 from .socket import DummySocket
22 12
23 13 #-----------------------------------------------------------------------------
24 14 # Channel classes
25 15 #-----------------------------------------------------------------------------
26 16
27 17 class InProcessChannel(object):
28 18 """Base class for in-process channels."""
29 19 proxy_methods = []
30 20
31 21 def __init__(self, client=None):
32 22 super(InProcessChannel, self).__init__()
33 23 self.client = client
34 24 self._is_alive = False
35 25
36 26 #--------------------------------------------------------------------------
37 27 # Channel interface
38 28 #--------------------------------------------------------------------------
39 29
40 30 def is_alive(self):
41 31 return self._is_alive
42 32
43 33 def start(self):
44 34 self._is_alive = True
45 35
46 36 def stop(self):
47 37 self._is_alive = False
48 38
49 39 def call_handlers(self, msg):
50 40 """ This method is called in the main thread when a message arrives.
51 41
52 42 Subclasses should override this method to handle incoming messages.
53 43 """
54 44 raise NotImplementedError('call_handlers must be defined in a subclass.')
55 45
56 46 #--------------------------------------------------------------------------
57 47 # InProcessChannel interface
58 48 #--------------------------------------------------------------------------
59 49
60 50 def call_handlers_later(self, *args, **kwds):
61 51 """ Call the message handlers later.
62 52
63 53 The default implementation just calls the handlers immediately, but this
64 54 method exists so that GUI toolkits can defer calling the handlers until
65 55 after the event loop has run, as expected by GUI frontends.
66 56 """
67 57 self.call_handlers(*args, **kwds)
68 58
69 59 def process_events(self):
70 60 """ Process any pending GUI events.
71 61
72 62 This method will never be called from a frontend without an event
73 63 loop (e.g., a terminal frontend).
74 64 """
75 65 raise NotImplementedError
76 66
77 67
78 68 class InProcessShellChannel(InProcessChannel):
79 69 """See `IPython.kernel.channels.ShellChannel` for docstrings."""
80 70
81 71 # flag for whether execute requests should be allowed to call raw_input
82 72 allow_stdin = True
83 73 proxy_methods = [
84 74 'execute',
85 75 'complete',
86 76 'object_info',
87 77 'history',
88 78 'shutdown',
89 79 'kernel_info',
90 80 ]
91 81
92 82 #--------------------------------------------------------------------------
93 83 # ShellChannel interface
94 84 #--------------------------------------------------------------------------
95 85
96 86 def execute(self, code, silent=False, store_history=True,
97 user_variables=[], user_expressions={}, allow_stdin=None):
87 user_expressions={}, allow_stdin=None):
98 88 if allow_stdin is None:
99 89 allow_stdin = self.allow_stdin
100 90 content = dict(code=code, silent=silent, store_history=store_history,
101 user_variables=user_variables,
102 91 user_expressions=user_expressions,
103 92 allow_stdin=allow_stdin)
104 93 msg = self.client.session.msg('execute_request', content)
105 94 self._dispatch_to_kernel(msg)
106 95 return msg['header']['msg_id']
107 96
108 97 def complete(self, text, line, cursor_pos, block=None):
109 98 content = dict(text=text, line=line, block=block, cursor_pos=cursor_pos)
110 99 msg = self.client.session.msg('complete_request', content)
111 100 self._dispatch_to_kernel(msg)
112 101 return msg['header']['msg_id']
113 102
114 103 def object_info(self, oname, detail_level=0):
115 104 content = dict(oname=oname, detail_level=detail_level)
116 105 msg = self.client.session.msg('object_info_request', content)
117 106 self._dispatch_to_kernel(msg)
118 107 return msg['header']['msg_id']
119 108
120 109 def history(self, raw=True, output=False, hist_access_type='range', **kwds):
121 110 content = dict(raw=raw, output=output,
122 111 hist_access_type=hist_access_type, **kwds)
123 112 msg = self.client.session.msg('history_request', content)
124 113 self._dispatch_to_kernel(msg)
125 114 return msg['header']['msg_id']
126 115
127 116 def shutdown(self, restart=False):
128 117 # FIXME: What to do here?
129 118 raise NotImplementedError('Cannot shutdown in-process kernel')
130 119
131 120 def kernel_info(self):
132 121 """Request kernel info."""
133 122 msg = self.client.session.msg('kernel_info_request')
134 123 self._dispatch_to_kernel(msg)
135 124 return msg['header']['msg_id']
136 125
137 126 #--------------------------------------------------------------------------
138 127 # Protected interface
139 128 #--------------------------------------------------------------------------
140 129
141 130 def _dispatch_to_kernel(self, msg):
142 131 """ Send a message to the kernel and handle a reply.
143 132 """
144 133 kernel = self.client.kernel
145 134 if kernel is None:
146 135 raise RuntimeError('Cannot send request. No kernel exists.')
147 136
148 137 stream = DummySocket()
149 138 self.client.session.send(stream, msg)
150 139 msg_parts = stream.recv_multipart()
151 140 kernel.dispatch_shell(stream, msg_parts)
152 141
153 142 idents, reply_msg = self.client.session.recv(stream, copy=False)
154 143 self.call_handlers_later(reply_msg)
155 144
156 145
157 146 class InProcessIOPubChannel(InProcessChannel):
158 147 """See `IPython.kernel.channels.IOPubChannel` for docstrings."""
159 148
160 149 def flush(self, timeout=1.0):
161 150 pass
162 151
163 152
164 153 class InProcessStdInChannel(InProcessChannel):
165 154 """See `IPython.kernel.channels.StdInChannel` for docstrings."""
166 155
167 156 proxy_methods = ['input']
168 157
169 158 def input(self, string):
170 159 kernel = self.client.kernel
171 160 if kernel is None:
172 161 raise RuntimeError('Cannot send input reply. No kernel exists.')
173 162 kernel.raw_input_str = string
174 163
175 164
176 165 class InProcessHBChannel(InProcessChannel):
177 166 """See `IPython.kernel.channels.HBChannel` for docstrings."""
178 167
179 168 time_to_dead = 3.0
180 169
181 170 def __init__(self, *args, **kwds):
182 171 super(InProcessHBChannel, self).__init__(*args, **kwds)
183 172 self._pause = True
184 173
185 174 def pause(self):
186 175 self._pause = True
187 176
188 177 def unpause(self):
189 178 self._pause = False
190 179
191 180 def is_beating(self):
192 181 return not self._pause
193 182
194 183 #-----------------------------------------------------------------------------
195 184 # ABC Registration
196 185 #-----------------------------------------------------------------------------
197 186
198 187 ShellChannelABC.register(InProcessShellChannel)
199 188 IOPubChannelABC.register(InProcessIOPubChannel)
200 189 HBChannelABC.register(InProcessHBChannel)
201 190 StdInChannelABC.register(InProcessStdInChannel)
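The in-process shell channel short-circuits the wire: _dispatch_to_kernel serializes the request through a DummySocket, hands the parts to kernel.dispatch_shell, and delivers the reply via call_handlers_later. A hypothetical subclass that simply records replies:

    class RecordingInProcessShellChannel(InProcessShellChannel):
        """Hypothetical subclass that keeps replies instead of deferring them."""

        def __init__(self, client=None):
            super(RecordingInProcessShellChannel, self).__init__(client)
            self.replies = []

        def call_handlers(self, msg):
            # invoked synchronously via call_handlers_later() once the kernel
            # has produced its reply
            self.replies.append(msg)

    # Usage sketch, assuming `client` is an in-process kernel client with a
    # live kernel attached:
    #   channel = RecordingInProcessShellChannel(client)
    #   channel.start()
    #   channel.execute("x = 1")
    #   reply = channel.replies[-1]   # the execute_reply message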
@@ -1,442 +1,420 b''
1 1 """Test suite for our zeromq-based message specification."""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import re
7 7 from distutils.version import LooseVersion as V
8 8 from subprocess import PIPE
9 9 try:
10 10 from queue import Empty # Py 3
11 11 except ImportError:
12 12 from Queue import Empty # Py 2
13 13
14 14 import nose.tools as nt
15 15
16 16 from IPython.kernel import KernelManager
17 17
18 18 from IPython.utils.traitlets import (
19 19 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
20 20 )
21 21 from IPython.utils.py3compat import string_types, iteritems
22 22
23 23 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
24 24
25 25 #-----------------------------------------------------------------------------
26 26 # Globals
27 27 #-----------------------------------------------------------------------------
28 28 KC = None
29 29
30 30 def setup():
31 31 global KC
32 32 KC = start_global_kernel()
33 33
34 34 #-----------------------------------------------------------------------------
35 35 # Message Spec References
36 36 #-----------------------------------------------------------------------------
37 37
38 38 class Reference(HasTraits):
39 39
40 40 """
41 41 Base class for message spec specification testing.
42 42
43 43 This class is the core of the message specification test. The
44 44 idea is that child classes implement trait attributes for each
45 45 message key, so that message keys can be tested against these
46 46 traits using the :meth:`check` method.
47 47
48 48 """
49 49
50 50 def check(self, d):
51 51 """validate a dict against our traits"""
52 52 for key in self.trait_names():
53 53 nt.assert_in(key, d)
54 54 # FIXME: always allow None, probably not a good idea
55 55 if d[key] is None:
56 56 continue
57 57 try:
58 58 setattr(self, key, d[key])
59 59 except TraitError as e:
60 60 assert False, str(e)
61 61
62 62 class Version(Unicode):
63 63 def validate(self, obj, value):
64 64 min_version = self.default_value
65 65 if V(value) < V(min_version):
66 66 raise TraitError("bad version: %s < %s" % (value, min_version))
67 67
68 68 class RMessage(Reference):
69 69 msg_id = Unicode()
70 70 msg_type = Unicode()
71 71 header = Dict()
72 72 parent_header = Dict()
73 73 content = Dict()
74 74
75 75 def check(self, d):
76 76 super(RMessage, self).check(d)
77 77 RHeader().check(self.header)
78 if self.parent_header:
78 79 RHeader().check(self.parent_header)
79 80
80 81 class RHeader(Reference):
81 82 msg_id = Unicode()
82 83 msg_type = Unicode()
83 84 session = Unicode()
84 85 username = Unicode()
85 86 version = Version('5.0')
86 87
87 88
88 89 class ExecuteReply(Reference):
89 90 execution_count = Integer()
90 91 status = Enum((u'ok', u'error'))
91 92
92 93 def check(self, d):
93 94 Reference.check(self, d)
94 95 if d['status'] == 'ok':
95 96 ExecuteReplyOkay().check(d)
96 97 elif d['status'] == 'error':
97 98 ExecuteReplyError().check(d)
98 99
99 100
100 101 class ExecuteReplyOkay(Reference):
101 102 payload = List(Dict)
102 user_variables = Dict()
103 103 user_expressions = Dict()
104 104
105 105
106 106 class ExecuteReplyError(Reference):
107 107 ename = Unicode()
108 108 evalue = Unicode()
109 109 traceback = List(Unicode)
110 110
111 111
112 112 class OInfoReply(Reference):
113 113 name = Unicode()
114 114 found = Bool()
115 115 ismagic = Bool()
116 116 isalias = Bool()
117 117 namespace = Enum((u'builtin', u'magics', u'alias', u'Interactive'))
118 118 type_name = Unicode()
119 119 string_form = Unicode()
120 120 base_class = Unicode()
121 121 length = Integer()
122 122 file = Unicode()
123 123 definition = Unicode()
124 124 argspec = Dict()
125 125 init_definition = Unicode()
126 126 docstring = Unicode()
127 127 init_docstring = Unicode()
128 128 class_docstring = Unicode()
129 129 call_def = Unicode()
130 130 call_docstring = Unicode()
131 131 source = Unicode()
132 132
133 133 def check(self, d):
134 134 super(OInfoReply, self).check(d)
135 135 if d['argspec'] is not None:
136 136 ArgSpec().check(d['argspec'])
137 137
138 138
139 139 class ArgSpec(Reference):
140 140 args = List(Unicode)
141 141 varargs = Unicode()
142 142 varkw = Unicode()
143 143 defaults = List()
144 144
145 145
146 146 class Status(Reference):
147 147 execution_state = Enum((u'busy', u'idle', u'starting'))
148 148
149 149
150 150 class CompleteReply(Reference):
151 151 matches = List(Unicode)
152 152
153 153
154 154 class KernelInfoReply(Reference):
155 155 protocol_version = Version('5.0')
156 156 ipython_version = Version('2.0')
157 157 language_version = Version('2.7')
158 158 language = Unicode()
159 159
160 160
161 161 # IOPub messages
162 162
163 163 class ExecuteInput(Reference):
164 164 code = Unicode()
165 165 execution_count = Integer()
166 166
167 167
168 168 Error = ExecuteReplyError
169 169
170 170
171 171 class Stream(Reference):
172 172 name = Enum((u'stdout', u'stderr'))
173 173 data = Unicode()
174 174
175 175
176 176 mime_pat = re.compile(r'\w+/\w+')
177 177
178 178 class DisplayData(Reference):
179 179 source = Unicode()
180 180 metadata = Dict()
181 181 data = Dict()
182 182 def _data_changed(self, name, old, new):
183 183 for k,v in iteritems(new):
184 184 assert mime_pat.match(k)
185 185 nt.assert_is_instance(v, string_types)
186 186
187 187
188 188 class ExecuteResult(Reference):
189 189 execution_count = Integer()
190 190 data = Dict()
191 191 def _data_changed(self, name, old, new):
192 192 for k,v in iteritems(new):
193 193 assert mime_pat.match(k)
194 194 nt.assert_is_instance(v, string_types)
195 195
196 196
197 197 references = {
198 198 'execute_reply' : ExecuteReply(),
199 199 'object_info_reply' : OInfoReply(),
200 200 'status' : Status(),
201 201 'complete_reply' : CompleteReply(),
202 202 'kernel_info_reply': KernelInfoReply(),
203 203 'execute_input' : ExecuteInput(),
204 204 'execute_result' : ExecuteResult(),
205 205 'error' : Error(),
206 206 'stream' : Stream(),
207 207 'display_data' : DisplayData(),
208 208 'header' : RHeader(),
209 209 }
210 210 """
211 211 Specifications of `content` part of the reply messages.
212 212 """
213 213
214 214
215 215 def validate_message(msg, msg_type=None, parent=None):
216 216 """validate a message
217 217
218 218 This validates the overall message structure and then checks the
219 219 content against the matching Reference spec above.
220 220
221 221 If msg_type and/or parent are given, the msg_type and/or parent msg_id
222 222 are compared with the given values.
223 223 """
224 224 RMessage().check(msg)
225 225 if msg_type:
226 226 nt.assert_equal(msg['msg_type'], msg_type)
227 227 if parent:
228 228 nt.assert_equal(msg['parent_header']['msg_id'], parent)
229 229 content = msg['content']
230 230 ref = references[msg['msg_type']]
231 231 ref.check(content)
232 232
233 233
234 234 #-----------------------------------------------------------------------------
235 235 # Tests
236 236 #-----------------------------------------------------------------------------
237 237
238 238 # Shell channel
239 239
240 240 def test_execute():
241 241 flush_channels()
242 242
243 243 msg_id = KC.execute(code='x=1')
244 244 reply = KC.get_shell_msg(timeout=TIMEOUT)
245 245 validate_message(reply, 'execute_reply', msg_id)
246 246
247 247
248 248 def test_execute_silent():
249 249 flush_channels()
250 250 msg_id, reply = execute(code='x=1', silent=True)
251 251
252 252 # flush status=idle
253 253 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
254 254 validate_message(status, 'status', msg_id)
255 255 nt.assert_equal(status['content']['execution_state'], 'idle')
256 256
257 257 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
258 258 count = reply['execution_count']
259 259
260 260 msg_id, reply = execute(code='x=2', silent=True)
261 261
262 262 # flush status=idle
263 263 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
264 264 validate_message(status, 'status', msg_id)
265 265 nt.assert_equal(status['content']['execution_state'], 'idle')
266 266
267 267 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
268 268 count_2 = reply['execution_count']
269 269 nt.assert_equal(count_2, count)
270 270
271 271
272 272 def test_execute_error():
273 273 flush_channels()
274 274
275 275 msg_id, reply = execute(code='1/0')
276 276 nt.assert_equal(reply['status'], 'error')
277 277 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
278 278
279 279 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
280 280 validate_message(error, 'error', msg_id)
281 281
282 282
283 283 def test_execute_inc():
284 284 """execute request should increment execution_count"""
285 285 flush_channels()
286 286
287 287 msg_id, reply = execute(code='x=1')
288 288 count = reply['execution_count']
289 289
290 290 flush_channels()
291 291
292 292 msg_id, reply = execute(code='x=2')
293 293 count_2 = reply['execution_count']
294 294 nt.assert_equal(count_2, count+1)
295 295
296 296
297 def test_user_variables():
298 flush_channels()
299
300 msg_id, reply = execute(code='x=1', user_variables=['x'])
301 user_variables = reply['user_variables']
302 nt.assert_equal(user_variables, {u'x': {
303 u'status': u'ok',
304 u'data': {u'text/plain': u'1'},
305 u'metadata': {},
306 }})
307
308
309 def test_user_variables_fail():
310 flush_channels()
311
312 msg_id, reply = execute(code='x=1', user_variables=['nosuchname'])
313 user_variables = reply['user_variables']
314 foo = user_variables['nosuchname']
315 nt.assert_equal(foo['status'], 'error')
316 nt.assert_equal(foo['ename'], 'KeyError')
317
318
319 297 def test_user_expressions():
320 298 flush_channels()
321 299
322 300 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
323 301 user_expressions = reply['user_expressions']
324 302 nt.assert_equal(user_expressions, {u'foo': {
325 303 u'status': u'ok',
326 304 u'data': {u'text/plain': u'2'},
327 305 u'metadata': {},
328 306 }})
329 307
330 308
331 309 def test_user_expressions_fail():
332 310 flush_channels()
333 311
334 312 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
335 313 user_expressions = reply['user_expressions']
336 314 foo = user_expressions['foo']
337 315 nt.assert_equal(foo['status'], 'error')
338 316 nt.assert_equal(foo['ename'], 'NameError')
339 317
340 318
341 319 def test_oinfo():
342 320 flush_channels()
343 321
344 322 msg_id = KC.object_info('a')
345 323 reply = KC.get_shell_msg(timeout=TIMEOUT)
346 324 validate_message(reply, 'object_info_reply', msg_id)
347 325
348 326
349 327 def test_oinfo_found():
350 328 flush_channels()
351 329
352 330 msg_id, reply = execute(code='a=5')
353 331
354 332 msg_id = KC.object_info('a')
355 333 reply = KC.get_shell_msg(timeout=TIMEOUT)
356 334 validate_message(reply, 'object_info_reply', msg_id)
357 335 content = reply['content']
358 336 assert content['found']
359 337 argspec = content['argspec']
360 338 nt.assert_is(argspec, None)
361 339
362 340
363 341 def test_oinfo_detail():
364 342 flush_channels()
365 343
366 344 msg_id, reply = execute(code='ip=get_ipython()')
367 345
368 346 msg_id = KC.object_info('ip.object_inspect', detail_level=2)
369 347 reply = KC.get_shell_msg(timeout=TIMEOUT)
370 348 validate_message(reply, 'object_info_reply', msg_id)
371 349 content = reply['content']
372 350 assert content['found']
373 351 argspec = content['argspec']
374 352 nt.assert_is_instance(argspec, dict, "expected non-empty argspec dict, got %r" % argspec)
375 353 nt.assert_equal(argspec['defaults'], [0])
376 354
377 355
378 356 def test_oinfo_not_found():
379 357 flush_channels()
380 358
381 359 msg_id = KC.object_info('dne')
382 360 reply = KC.get_shell_msg(timeout=TIMEOUT)
383 361 validate_message(reply, 'object_info_reply', msg_id)
384 362 content = reply['content']
385 363 nt.assert_false(content['found'])
386 364
387 365
388 366 def test_complete():
389 367 flush_channels()
390 368
391 369 msg_id, reply = execute(code="alpha = albert = 5")
392 370
393 371 msg_id = KC.complete('al', 'al', 2)
394 372 reply = KC.get_shell_msg(timeout=TIMEOUT)
395 373 validate_message(reply, 'complete_reply', msg_id)
396 374 matches = reply['content']['matches']
397 375 for name in ('alpha', 'albert'):
398 376 nt.assert_in(name, matches)
399 377
400 378
401 379 def test_kernel_info_request():
402 380 flush_channels()
403 381
404 382 msg_id = KC.kernel_info()
405 383 reply = KC.get_shell_msg(timeout=TIMEOUT)
406 384 validate_message(reply, 'kernel_info_reply', msg_id)
407 385
408 386
409 387 def test_single_payload():
410 388 flush_channels()
411 389 msg_id, reply = execute(code="for i in range(3):\n"+
412 390 " x=range?\n")
413 391 payload = reply['payload']
414 392 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
415 393 nt.assert_equal(len(next_input_pls), 1)
416 394
417 395
418 396 # IOPub channel
419 397
420 398
421 399 def test_stream():
422 400 flush_channels()
423 401
424 402 msg_id, reply = execute("print('hi')")
425 403
426 404 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
427 405 validate_message(stdout, 'stream', msg_id)
428 406 content = stdout['content']
429 407 nt.assert_equal(content['name'], u'stdout')
430 408 nt.assert_equal(content['data'], u'hi\n')
431 409
432 410
433 411 def test_display_data():
434 412 flush_channels()
435 413
436 414 msg_id, reply = execute("from IPython.core.display import display; display(1)")
437 415
438 416 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
439 417 validate_message(display, 'display_data', parent=msg_id)
440 418 data = display['content']['data']
441 419 nt.assert_equal(data['text/plain'], u'1')
442 420
@@ -1,797 +1,793 b''
1 1 #!/usr/bin/env python
2 2 """An interactive kernel that talks to frontends over 0MQ."""
3 3
4 4 # Copyright (c) IPython Development Team.
5 5 # Distributed under the terms of the Modified BSD License.
6 6
7 7 from __future__ import print_function
8 8
9 9 import sys
10 10 import time
11 11 import traceback
12 12 import logging
13 13 import uuid
14 14
15 15 from datetime import datetime
16 16 from signal import (
17 17 signal, default_int_handler, SIGINT
18 18 )
19 19
20 20 import zmq
21 21 from zmq.eventloop import ioloop
22 22 from zmq.eventloop.zmqstream import ZMQStream
23 23
24 24 from IPython.config.configurable import Configurable
25 25 from IPython.core.error import StdinNotImplementedError
26 26 from IPython.core import release
27 27 from IPython.utils import py3compat
28 28 from IPython.utils.py3compat import builtin_mod, unicode_type, string_types
29 29 from IPython.utils.jsonutil import json_clean
30 30 from IPython.utils.traitlets import (
31 31 Any, Instance, Float, Dict, List, Set, Integer, Unicode,
32 32 Type, Bool,
33 33 )
34 34
35 35 from .serialize import serialize_object, unpack_apply_message
36 36 from .session import Session
37 37 from .zmqshell import ZMQInteractiveShell
38 38
39 39
40 40 #-----------------------------------------------------------------------------
41 41 # Main kernel class
42 42 #-----------------------------------------------------------------------------
43 43
44 44 protocol_version = release.kernel_protocol_version
45 45 ipython_version = release.version
46 46 language_version = sys.version.split()[0]
47 47
48 48
49 49 class Kernel(Configurable):
50 50
51 51 #---------------------------------------------------------------------------
52 52 # Kernel interface
53 53 #---------------------------------------------------------------------------
54 54
55 55 # attribute to override with a GUI
56 56 eventloop = Any(None)
57 57 def _eventloop_changed(self, name, old, new):
58 58 """schedule call to eventloop from IOLoop"""
59 59 loop = ioloop.IOLoop.instance()
60 60 loop.add_callback(self.enter_eventloop)
61 61
62 62 shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
63 63 shell_class = Type(ZMQInteractiveShell)
64 64
65 65 session = Instance(Session)
66 66 profile_dir = Instance('IPython.core.profiledir.ProfileDir')
67 67 shell_streams = List()
68 68 control_stream = Instance(ZMQStream)
69 69 iopub_socket = Instance(zmq.Socket)
70 70 stdin_socket = Instance(zmq.Socket)
71 71 log = Instance(logging.Logger)
72 72
73 73 user_module = Any()
74 74 def _user_module_changed(self, name, old, new):
75 75 if self.shell is not None:
76 76 self.shell.user_module = new
77 77
78 78 user_ns = Instance(dict, args=None, allow_none=True)
79 79 def _user_ns_changed(self, name, old, new):
80 80 if self.shell is not None:
81 81 self.shell.user_ns = new
82 82 self.shell.init_user_ns()
83 83
84 84 # identities:
85 85 int_id = Integer(-1)
86 86 ident = Unicode()
87 87
88 88 def _ident_default(self):
89 89 return unicode_type(uuid.uuid4())
90 90
91 91 # Private interface
92 92
93 93 _darwin_app_nap = Bool(True, config=True,
94 94 help="""Whether to use appnope for compatiblity with OS X App Nap.
95 95
96 96 Only affects OS X >= 10.9.
97 97 """
98 98 )
99 99
100 100 # Time to sleep after flushing the stdout/err buffers in each execute
101 101 # cycle. While this introduces a hard limit on the minimal latency of the
102 102 # execute cycle, it helps prevent output synchronization problems for
103 103 # clients.
104 104 # Units are in seconds. The minimum zmq latency on local host is probably
105 105 # ~150 microseconds, set this to 500us for now. We may need to increase it
106 106 # a little if it's not enough after more interactive testing.
107 107 _execute_sleep = Float(0.0005, config=True)
108 108
109 109 # Frequency of the kernel's event loop.
110 110 # Units are in seconds; kernel subclasses for GUI toolkits may need to
111 111 # adapt to milliseconds.
112 112 _poll_interval = Float(0.05, config=True)
113 113
114 114 # If the shutdown was requested over the network, we leave here the
115 115 # necessary reply message so it can be sent by our registered atexit
116 116 # handler. This ensures that the reply is only sent to clients truly at
117 117 # the end of our shutdown process (which happens after the underlying
118 118 # IPython shell's own shutdown).
119 119 _shutdown_message = None
120 120
121 121 # This is a dict of port numbers that the kernel is listening on. It is set
122 122 # by record_ports and used by connect_request.
123 123 _recorded_ports = Dict()
124 124
125 125 # A reference to the Python builtin 'raw_input' function.
126 126 # (i.e., __builtin__.raw_input for Python 2.7, builtins.input for Python 3)
127 127 _sys_raw_input = Any()
128 128 _sys_eval_input = Any()
129 129
130 130 # set of aborted msg_ids
131 131 aborted = Set()
132 132
133 133
134 134 def __init__(self, **kwargs):
135 135 super(Kernel, self).__init__(**kwargs)
136 136
137 137 # Initialize the InteractiveShell subclass
138 138 self.shell = self.shell_class.instance(parent=self,
139 139 profile_dir = self.profile_dir,
140 140 user_module = self.user_module,
141 141 user_ns = self.user_ns,
142 142 kernel = self,
143 143 )
144 144 self.shell.displayhook.session = self.session
145 145 self.shell.displayhook.pub_socket = self.iopub_socket
146 146 self.shell.displayhook.topic = self._topic('execute_result')
147 147 self.shell.display_pub.session = self.session
148 148 self.shell.display_pub.pub_socket = self.iopub_socket
149 149 self.shell.data_pub.session = self.session
150 150 self.shell.data_pub.pub_socket = self.iopub_socket
151 151
152 152 # TMP - hack while developing
153 153 self.shell._reply_content = None
154 154
155 155 # Build dict of handlers for message types
156 156 msg_types = [ 'execute_request', 'complete_request',
157 157 'object_info_request', 'history_request',
158 158 'kernel_info_request',
159 159 'connect_request', 'shutdown_request',
160 160 'apply_request',
161 161 ]
162 162 self.shell_handlers = {}
163 163 for msg_type in msg_types:
164 164 self.shell_handlers[msg_type] = getattr(self, msg_type)
165 165
166 166 comm_msg_types = [ 'comm_open', 'comm_msg', 'comm_close' ]
167 167 comm_manager = self.shell.comm_manager
168 168 for msg_type in comm_msg_types:
169 169 self.shell_handlers[msg_type] = getattr(comm_manager, msg_type)
170 170
171 171 control_msg_types = msg_types + [ 'clear_request', 'abort_request' ]
172 172 self.control_handlers = {}
173 173 for msg_type in control_msg_types:
174 174 self.control_handlers[msg_type] = getattr(self, msg_type)
175 175
176 176
177 177 def dispatch_control(self, msg):
178 178 """dispatch control requests"""
179 179 idents,msg = self.session.feed_identities(msg, copy=False)
180 180 try:
181 181 msg = self.session.unserialize(msg, content=True, copy=False)
182 182 except:
183 183 self.log.error("Invalid Control Message", exc_info=True)
184 184 return
185 185
186 186 self.log.debug("Control received: %s", msg)
187 187
188 188 header = msg['header']
189 189 msg_id = header['msg_id']
190 190 msg_type = header['msg_type']
191 191
192 192 handler = self.control_handlers.get(msg_type, None)
193 193 if handler is None:
194 194 self.log.error("UNKNOWN CONTROL MESSAGE TYPE: %r", msg_type)
195 195 else:
196 196 try:
197 197 handler(self.control_stream, idents, msg)
198 198 except Exception:
199 199 self.log.error("Exception in control handler:", exc_info=True)
200 200
201 201 def dispatch_shell(self, stream, msg):
202 202 """dispatch shell requests"""
203 203 # flush control requests first
204 204 if self.control_stream:
205 205 self.control_stream.flush()
206 206
207 207 idents,msg = self.session.feed_identities(msg, copy=False)
208 208 try:
209 209 msg = self.session.unserialize(msg, content=True, copy=False)
210 210 except:
211 211 self.log.error("Invalid Message", exc_info=True)
212 212 return
213 213
214 214 header = msg['header']
215 215 msg_id = header['msg_id']
216 216 msg_type = msg['header']['msg_type']
217 217
218 218 # Print some info about this message and leave a '--->' marker, so it's
219 219 # easier to trace visually the message chain when debugging. Each
220 220 # handler prints its message at the end.
221 221 self.log.debug('\n*** MESSAGE TYPE:%s***', msg_type)
222 222 self.log.debug(' Content: %s\n --->\n ', msg['content'])
223 223
224 224 if msg_id in self.aborted:
225 225 self.aborted.remove(msg_id)
226 226 # is it safe to assume a msg_id will not be resubmitted?
227 227 reply_type = msg_type.split('_')[0] + '_reply'
228 228 status = {'status' : 'aborted'}
229 229 md = {'engine' : self.ident}
230 230 md.update(status)
231 231 reply_msg = self.session.send(stream, reply_type, metadata=md,
232 232 content=status, parent=msg, ident=idents)
233 233 return
234 234
235 235 handler = self.shell_handlers.get(msg_type, None)
236 236 if handler is None:
237 237 self.log.error("UNKNOWN MESSAGE TYPE: %r", msg_type)
238 238 else:
239 239 # ensure default_int_handler during handler call
240 240 sig = signal(SIGINT, default_int_handler)
241 241 try:
242 242 handler(stream, idents, msg)
243 243 except Exception:
244 244 self.log.error("Exception in message handler:", exc_info=True)
245 245 finally:
246 246 signal(SIGINT, sig)
247 247
248 248 def enter_eventloop(self):
249 249 """enter eventloop"""
250 250 self.log.info("entering eventloop %s", self.eventloop)
251 251 for stream in self.shell_streams:
252 252 # flush any pending replies,
253 253 # which may be skipped by entering the eventloop
254 254 stream.flush(zmq.POLLOUT)
255 255 # restore default_int_handler
256 256 signal(SIGINT, default_int_handler)
257 257 while self.eventloop is not None:
258 258 try:
259 259 self.eventloop(self)
260 260 except KeyboardInterrupt:
261 261 # Ctrl-C shouldn't crash the kernel
262 262 self.log.error("KeyboardInterrupt caught in kernel")
263 263 continue
264 264 else:
265 265 # eventloop exited cleanly, this means we should stop (right?)
266 266 self.eventloop = None
267 267 break
268 268 self.log.info("exiting eventloop")
269 269
270 270 def start(self):
271 271 """register dispatchers for streams"""
272 272 self.shell.exit_now = False
273 273 if self.control_stream:
274 274 self.control_stream.on_recv(self.dispatch_control, copy=False)
275 275
276 276 def make_dispatcher(stream):
277 277 def dispatcher(msg):
278 278 return self.dispatch_shell(stream, msg)
279 279 return dispatcher
280 280
281 281 for s in self.shell_streams:
282 282 s.on_recv(make_dispatcher(s), copy=False)
283 283
284 284 # publish the starting status
285 285 self._publish_status('starting')
286 286
287 287 def do_one_iteration(self):
288 288 """step eventloop just once"""
289 289 if self.control_stream:
290 290 self.control_stream.flush()
291 291 for stream in self.shell_streams:
292 292 # handle at most one request per iteration
293 293 stream.flush(zmq.POLLIN, 1)
294 294 stream.flush(zmq.POLLOUT)
295 295
296 296
297 297 def record_ports(self, ports):
298 298 """Record the ports that this kernel is using.
299 299
300 300 The creator of the Kernel instance must call this method if they
301 301 want the :meth:`connect_request` method to return the port numbers.
302 302 """
303 303 self._recorded_ports = ports
304 304
305 305 #---------------------------------------------------------------------------
306 306 # Kernel request handlers
307 307 #---------------------------------------------------------------------------
308 308
309 309 def _make_metadata(self, other=None):
310 310 """init metadata dict, for execute/apply_reply"""
311 311 new_md = {
312 312 'dependencies_met' : True,
313 313 'engine' : self.ident,
314 314 'started': datetime.now(),
315 315 }
316 316 if other:
317 317 new_md.update(other)
318 318 return new_md
319 319
320 320 def _publish_execute_input(self, code, parent, execution_count):
321 321 """Publish the code request on the iopub stream."""
322 322
323 323 self.session.send(self.iopub_socket, u'execute_input',
324 324 {u'code':code, u'execution_count': execution_count},
325 325 parent=parent, ident=self._topic('execute_input')
326 326 )
327 327
328 328 def _publish_status(self, status, parent=None):
329 329 """send status (busy/idle) on IOPub"""
330 330 self.session.send(self.iopub_socket,
331 331 u'status',
332 332 {u'execution_state': status},
333 333 parent=parent,
334 334 ident=self._topic('status'),
335 335 )
336 336
337 337
338 338 def execute_request(self, stream, ident, parent):
339 339 """handle an execute_request"""
340 340
341 341 self._publish_status(u'busy', parent)
342 342
343 343 try:
344 344 content = parent[u'content']
345 345 code = py3compat.cast_unicode_py2(content[u'code'])
346 346 silent = content[u'silent']
347 347 store_history = content.get(u'store_history', not silent)
348 348 except:
349 349 self.log.error("Got bad msg: ")
350 350 self.log.error("%s", parent)
351 351 return
352 352
353 353 md = self._make_metadata(parent['metadata'])
354 354
355 355 shell = self.shell # we'll need this a lot here
356 356
357 357 # Replace raw_input. Note that it is not sufficient to replace
358 358 # raw_input in the user namespace.
359 359 if content.get('allow_stdin', False):
360 360 raw_input = lambda prompt='': self._raw_input(prompt, ident, parent)
361 361 input = lambda prompt='': eval(raw_input(prompt))
362 362 else:
363 363 raw_input = input = lambda prompt='' : self._no_raw_input()
364 364
365 365 if py3compat.PY3:
366 366 self._sys_raw_input = builtin_mod.input
367 367 builtin_mod.input = raw_input
368 368 else:
369 369 self._sys_raw_input = builtin_mod.raw_input
370 370 self._sys_eval_input = builtin_mod.input
371 371 builtin_mod.raw_input = raw_input
372 372 builtin_mod.input = input
373 373
374 374 # Set the parent message of the display hook and out streams.
375 375 shell.set_parent(parent)
376 376
377 377 # Re-broadcast our input for the benefit of listening clients, and
378 378 # start computing output
379 379 if not silent:
380 380 self._publish_execute_input(code, parent, shell.execution_count)
381 381
382 382 reply_content = {}
383 383 # FIXME: the shell calls the exception handler itself.
384 384 shell._reply_content = None
385 385 try:
386 386 shell.run_cell(code, store_history=store_history, silent=silent)
387 387 except:
388 388 status = u'error'
389 389 # FIXME: this code right now isn't being used yet by default,
390 390 # because the run_cell() call above directly fires off exception
391 391 # reporting. This code, therefore, is only active in the scenario
392 392 # where runlines itself has an unhandled exception. We need to
393 393 # uniformize this, for all exception construction to come from a
394 394 # single location in the codebase.
395 395 etype, evalue, tb = sys.exc_info()
396 396 tb_list = traceback.format_exception(etype, evalue, tb)
397 397 reply_content.update(shell._showtraceback(etype, evalue, tb_list))
398 398 else:
399 399 status = u'ok'
400 400 finally:
401 401 # Restore raw_input.
402 402 if py3compat.PY3:
403 403 builtin_mod.input = self._sys_raw_input
404 404 else:
405 405 builtin_mod.raw_input = self._sys_raw_input
406 406 builtin_mod.input = self._sys_eval_input
407 407
408 408 reply_content[u'status'] = status
409 409
410 410 # Return the execution counter so clients can display prompts
411 411 reply_content['execution_count'] = shell.execution_count - 1
412 412
413 413 # FIXME - fish exception info out of shell, possibly left there by
414 414 # runlines. We'll need to clean up this logic later.
415 415 if shell._reply_content is not None:
416 416 reply_content.update(shell._reply_content)
417 417 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='execute')
418 418 reply_content['engine_info'] = e_info
419 419 # reset after use
420 420 shell._reply_content = None
421 421
422 422 if 'traceback' in reply_content:
423 423 self.log.info("Exception in execute request:\n%s", '\n'.join(reply_content['traceback']))
424 424
425 425
426 426 # At this point, we can tell whether the main code execution succeeded
427 # or not. If it did, we proceed to evaluate user_variables/expressions
427 # or not. If it did, we proceed to evaluate user_expressions
428 428 if reply_content['status'] == 'ok':
429 reply_content[u'user_variables'] = \
430 shell.user_variables(content.get(u'user_variables', []))
431 429 reply_content[u'user_expressions'] = \
432 430 shell.user_expressions(content.get(u'user_expressions', {}))
433 431 else:
434 # If there was an error, don't even try to compute variables or
435 # expressions
436 reply_content[u'user_variables'] = {}
432 # If there was an error, don't even try to compute expressions
437 433 reply_content[u'user_expressions'] = {}
438 434
439 435 # Payloads should be retrieved regardless of outcome, so we can both
440 436 # recover partial output (that could have been generated early in a
441 437 # block, before an error) and clear the payload system always.
442 438 reply_content[u'payload'] = shell.payload_manager.read_payload()
443 439 # Be aggressive about clearing the payload because we don't want
444 440 # it to sit in memory until the next execute_request comes in.
445 441 shell.payload_manager.clear_payload()
446 442
447 443 # Flush output before sending the reply.
448 444 sys.stdout.flush()
449 445 sys.stderr.flush()
450 446 # FIXME: on rare occasions, the flush doesn't seem to make it to the
451 447 # clients... This seems to mitigate the problem, but we definitely need
452 448 # to better understand what's going on.
453 449 if self._execute_sleep:
454 450 time.sleep(self._execute_sleep)
455 451
456 452 # Send the reply.
457 453 reply_content = json_clean(reply_content)
458 454
459 455 md['status'] = reply_content['status']
460 456 if reply_content['status'] == 'error' and \
461 457 reply_content['ename'] == 'UnmetDependency':
462 458 md['dependencies_met'] = False
463 459
464 460 reply_msg = self.session.send(stream, u'execute_reply',
465 461 reply_content, parent, metadata=md,
466 462 ident=ident)
467 463
468 464 self.log.debug("%s", reply_msg)
469 465
470 466 if not silent and reply_msg['content']['status'] == u'error':
471 467 self._abort_queues()
472 468
473 469 self._publish_status(u'idle', parent)
474 470
475 471 def complete_request(self, stream, ident, parent):
476 472 txt, matches = self._complete(parent)
477 473 matches = {'matches' : matches,
478 474 'matched_text' : txt,
479 475 'status' : 'ok'}
480 476 matches = json_clean(matches)
481 477 completion_msg = self.session.send(stream, 'complete_reply',
482 478 matches, parent, ident)
483 479 self.log.debug("%s", completion_msg)
484 480
485 481 def object_info_request(self, stream, ident, parent):
486 482 content = parent['content']
487 483 object_info = self.shell.object_inspect(content['oname'],
488 484 detail_level = content.get('detail_level', 0)
489 485 )
490 486 # Before we send this object over, we scrub it for JSON usage
491 487 oinfo = json_clean(object_info)
492 488 msg = self.session.send(stream, 'object_info_reply',
493 489 oinfo, parent, ident)
494 490 self.log.debug("%s", msg)
495 491
496 492 def history_request(self, stream, ident, parent):
497 493 # We need to pull these out, as passing **kwargs doesn't work with
498 494 # unicode keys before Python 2.6.5.
499 495 hist_access_type = parent['content']['hist_access_type']
500 496 raw = parent['content']['raw']
501 497 output = parent['content']['output']
502 498 if hist_access_type == 'tail':
503 499 n = parent['content']['n']
504 500 hist = self.shell.history_manager.get_tail(n, raw=raw, output=output,
505 501 include_latest=True)
506 502
507 503 elif hist_access_type == 'range':
508 504 session = parent['content']['session']
509 505 start = parent['content']['start']
510 506 stop = parent['content']['stop']
511 507 hist = self.shell.history_manager.get_range(session, start, stop,
512 508 raw=raw, output=output)
513 509
514 510 elif hist_access_type == 'search':
515 511 n = parent['content'].get('n')
516 512 unique = parent['content'].get('unique', False)
517 513 pattern = parent['content']['pattern']
518 514 hist = self.shell.history_manager.search(
519 515 pattern, raw=raw, output=output, n=n, unique=unique)
520 516
521 517 else:
522 518 hist = []
523 519 hist = list(hist)
524 520 content = {'history' : hist}
525 521 content = json_clean(content)
526 522 msg = self.session.send(stream, 'history_reply',
527 523 content, parent, ident)
528 524 self.log.debug("Sending history reply with %i entries", len(hist))
529 525
530 526 def connect_request(self, stream, ident, parent):
531 527 if self._recorded_ports is not None:
532 528 content = self._recorded_ports.copy()
533 529 else:
534 530 content = {}
535 531 msg = self.session.send(stream, 'connect_reply',
536 532 content, parent, ident)
537 533 self.log.debug("%s", msg)
538 534
539 535 def kernel_info_request(self, stream, ident, parent):
540 536 vinfo = {
541 537 'protocol_version': protocol_version,
542 538 'ipython_version': ipython_version,
543 539 'language_version': language_version,
544 540 'language': 'python',
545 541 }
546 542 msg = self.session.send(stream, 'kernel_info_reply',
547 543 vinfo, parent, ident)
548 544 self.log.debug("%s", msg)
549 545
550 546 def shutdown_request(self, stream, ident, parent):
551 547 self.shell.exit_now = True
552 548 content = dict(status='ok')
553 549 content.update(parent['content'])
554 550 self.session.send(stream, u'shutdown_reply', content, parent, ident=ident)
555 551 # same content, but different msg_id for broadcasting on IOPub
556 552 self._shutdown_message = self.session.msg(u'shutdown_reply',
557 553 content, parent
558 554 )
559 555
560 556 self._at_shutdown()
561 557 # call sys.exit after a short delay
562 558 loop = ioloop.IOLoop.instance()
563 559 loop.add_timeout(time.time()+0.1, loop.stop)
564 560
565 561 #---------------------------------------------------------------------------
566 562 # Engine methods
567 563 #---------------------------------------------------------------------------
568 564
569 565 def apply_request(self, stream, ident, parent):
570 566 try:
571 567 content = parent[u'content']
572 568 bufs = parent[u'buffers']
573 569 msg_id = parent['header']['msg_id']
574 570 except:
575 571 self.log.error("Got bad msg: %s", parent, exc_info=True)
576 572 return
577 573
578 574 self._publish_status(u'busy', parent)
579 575
580 576 # Set the parent message of the display hook and out streams.
581 577 shell = self.shell
582 578 shell.set_parent(parent)
583 579
584 580 # execute_input_msg = self.session.msg(u'execute_input',{u'code':code}, parent=parent)
585 581 # self.iopub_socket.send(execute_input_msg)
586 582 # self.session.send(self.iopub_socket, u'execute_input', {u'code':code},parent=parent)
587 583 md = self._make_metadata(parent['metadata'])
588 584 try:
589 585 working = shell.user_ns
590 586
591 587 prefix = "_"+str(msg_id).replace("-","")+"_"
592 588
593 589 f,args,kwargs = unpack_apply_message(bufs, working, copy=False)
594 590
595 591 fname = getattr(f, '__name__', 'f')
596 592
597 593 fname = prefix+"f"
598 594 argname = prefix+"args"
599 595 kwargname = prefix+"kwargs"
600 596 resultname = prefix+"result"
601 597
602 598 ns = { fname : f, argname : args, kwargname : kwargs , resultname : None }
603 599 # print ns
604 600 working.update(ns)
605 601 code = "%s = %s(*%s,**%s)" % (resultname, fname, argname, kwargname)
606 602 try:
607 603 exec(code, shell.user_global_ns, shell.user_ns)
608 604 result = working.get(resultname)
609 605 finally:
610 606 for key in ns:
611 607 working.pop(key)
612 608
613 609 result_buf = serialize_object(result,
614 610 buffer_threshold=self.session.buffer_threshold,
615 611 item_threshold=self.session.item_threshold,
616 612 )
617 613
618 614 except:
619 615 # invoke IPython traceback formatting
620 616 shell.showtraceback()
621 617 # FIXME - fish exception info out of shell, possibly left there by
622 618 # run_code. We'll need to clean up this logic later.
623 619 reply_content = {}
624 620 if shell._reply_content is not None:
625 621 reply_content.update(shell._reply_content)
626 622 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method='apply')
627 623 reply_content['engine_info'] = e_info
628 624 # reset after use
629 625 shell._reply_content = None
630 626
631 627 self.session.send(self.iopub_socket, u'error', reply_content, parent=parent,
632 628 ident=self._topic('error'))
633 629 self.log.info("Exception in apply request:\n%s", '\n'.join(reply_content['traceback']))
634 630 result_buf = []
635 631
636 632 if reply_content['ename'] == 'UnmetDependency':
637 633 md['dependencies_met'] = False
638 634 else:
639 635 reply_content = {'status' : 'ok'}
640 636
641 637 # put 'ok'/'error' status in header, for scheduler introspection:
642 638 md['status'] = reply_content['status']
643 639
644 640 # flush i/o
645 641 sys.stdout.flush()
646 642 sys.stderr.flush()
647 643
648 644 reply_msg = self.session.send(stream, u'apply_reply', reply_content,
649 645 parent=parent, ident=ident,buffers=result_buf, metadata=md)
650 646
651 647 self._publish_status(u'idle', parent)
652 648
653 649 #---------------------------------------------------------------------------
654 650 # Control messages
655 651 #---------------------------------------------------------------------------
656 652
657 653 def abort_request(self, stream, ident, parent):
658 654 """abort a specifig msg by id"""
659 655 msg_ids = parent['content'].get('msg_ids', None)
660 656 if isinstance(msg_ids, string_types):
661 657 msg_ids = [msg_ids]
662 658 if not msg_ids:
663 659 self.abort_queues()
664 660 for mid in msg_ids:
665 661 self.aborted.add(str(mid))
666 662
667 663 content = dict(status='ok')
668 664 reply_msg = self.session.send(stream, 'abort_reply', content=content,
669 665 parent=parent, ident=ident)
670 666 self.log.debug("%s", reply_msg)
671 667
672 668 def clear_request(self, stream, idents, parent):
673 669 """Clear our namespace."""
674 670 self.shell.reset(False)
675 671 msg = self.session.send(stream, 'clear_reply', ident=idents, parent=parent,
676 672 content = dict(status='ok'))
677 673
678 674
679 675 #---------------------------------------------------------------------------
680 676 # Protected interface
681 677 #---------------------------------------------------------------------------
682 678
683 679 def _wrap_exception(self, method=None):
684 680 # import here, because _wrap_exception is only used in parallel,
685 681 # and parallel has higher min pyzmq version
686 682 from IPython.parallel.error import wrap_exception
687 683 e_info = dict(engine_uuid=self.ident, engine_id=self.int_id, method=method)
688 684 content = wrap_exception(e_info)
689 685 return content
690 686
691 687 def _topic(self, topic):
692 688 """prefixed topic for IOPub messages"""
693 689 if self.int_id >= 0:
694 690 base = "engine.%i" % self.int_id
695 691 else:
696 692 base = "kernel.%s" % self.ident
697 693
698 694 return py3compat.cast_bytes("%s.%s" % (base, topic))
699 695
700 696 def _abort_queues(self):
701 697 for stream in self.shell_streams:
702 698 if stream:
703 699 self._abort_queue(stream)
704 700
705 701 def _abort_queue(self, stream):
706 702 poller = zmq.Poller()
707 703 poller.register(stream.socket, zmq.POLLIN)
708 704 while True:
709 705 idents,msg = self.session.recv(stream, zmq.NOBLOCK, content=True)
710 706 if msg is None:
711 707 return
712 708
713 709 self.log.info("Aborting:")
714 710 self.log.info("%s", msg)
715 711 msg_type = msg['header']['msg_type']
716 712 reply_type = msg_type.split('_')[0] + '_reply'
717 713
718 714 status = {'status' : 'aborted'}
719 715 md = {'engine' : self.ident}
720 716 md.update(status)
721 717 reply_msg = self.session.send(stream, reply_type, metadata=md,
722 718 content=status, parent=msg, ident=idents)
723 719 self.log.debug("%s", reply_msg)
724 720 # We need to wait a bit for requests to come in. This can probably
725 721 # be set shorter for true asynchronous clients.
726 722 poller.poll(50)
727 723
728 724
729 725 def _no_raw_input(self):
730 726 """Raise StdinNotImplentedError if active frontend doesn't support
731 727 stdin."""
732 728 raise StdinNotImplementedError("raw_input was called, but this "
733 729 "frontend does not support stdin.")
734 730
735 731 def _raw_input(self, prompt, ident, parent):
736 732 # Flush output before making the request.
737 733 sys.stderr.flush()
738 734 sys.stdout.flush()
739 735 # flush the stdin socket, to purge stale replies
740 736 while True:
741 737 try:
742 738 self.stdin_socket.recv_multipart(zmq.NOBLOCK)
743 739 except zmq.ZMQError as e:
744 740 if e.errno == zmq.EAGAIN:
745 741 break
746 742 else:
747 743 raise
748 744
749 745 # Send the input request.
750 746 content = json_clean(dict(prompt=prompt))
751 747 self.session.send(self.stdin_socket, u'input_request', content, parent,
752 748 ident=ident)
753 749
754 750 # Await a response.
755 751 while True:
756 752 try:
757 753 ident, reply = self.session.recv(self.stdin_socket, 0)
758 754 except Exception:
759 755 self.log.warn("Invalid Message:", exc_info=True)
760 756 except KeyboardInterrupt:
761 757 # re-raise KeyboardInterrupt, to truncate traceback
762 758 raise KeyboardInterrupt
763 759 else:
764 760 break
765 761 try:
766 762 value = py3compat.unicode_to_str(reply['content']['value'])
767 763 except:
768 764 self.log.error("Got bad raw_input reply: ")
769 765 self.log.error("%s", parent)
770 766 value = ''
771 767 if value == '\x04':
772 768 # EOF
773 769 raise EOFError
774 770 return value
775 771
776 772 def _complete(self, msg):
777 773 c = msg['content']
778 774 try:
779 775 cpos = int(c['cursor_pos'])
780 776 except:
781 777 # If we don't get something that we can convert to an integer, at
782 778 # least attempt the completion, guessing that the cursor is at the end
783 779 # of the text if there is any, and otherwise at the end of the line
784 780 cpos = len(c['text'])
785 781 if cpos==0:
786 782 cpos = len(c['line'])
787 783 return self.shell.complete(c['text'], c['line'], cpos)
788 784
789 785 def _at_shutdown(self):
790 786 """Actions taken at shutdown by the kernel, called by python's atexit.
791 787 """
792 788 # io.rprint("Kernel at_shutdown") # dbg
793 789 if self._shutdown_message is not None:
794 790 self.session.send(self.iopub_socket, self._shutdown_message, ident=self._topic('shutdown'))
795 791 self.log.debug("%s", self._shutdown_message)
796 792 [ s.flush(zmq.POLLOUT) for s in self.shell_streams ]
797 793
@@ -1,1863 +1,1863 b''
1 1 """A semi-synchronous Client for IPython parallel"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from __future__ import print_function
7 7
8 8 import os
9 9 import json
10 10 import sys
11 11 from threading import Thread, Event
12 12 import time
13 13 import warnings
14 14 from datetime import datetime
15 15 from getpass import getpass
16 16 from pprint import pprint
17 17
18 18 pjoin = os.path.join
19 19
20 20 import zmq
21 21
22 22 from IPython.config.configurable import MultipleInstanceError
23 23 from IPython.core.application import BaseIPythonApplication
24 24 from IPython.core.profiledir import ProfileDir, ProfileDirError
25 25
26 26 from IPython.utils.capture import RichOutput
27 27 from IPython.utils.coloransi import TermColors
28 28 from IPython.utils.jsonutil import rekey, extract_dates, parse_date
29 29 from IPython.utils.localinterfaces import localhost, is_local_ip
30 30 from IPython.utils.path import get_ipython_dir
31 31 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
32 32 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
33 33 Dict, List, Bool, Set, Any)
34 34 from IPython.external.decorator import decorator
35 35 from IPython.external.ssh import tunnel
36 36
37 37 from IPython.parallel import Reference
38 38 from IPython.parallel import error
39 39 from IPython.parallel import util
40 40
41 41 from IPython.kernel.zmq.session import Session, Message
42 42 from IPython.kernel.zmq import serialize
43 43
44 44 from .asyncresult import AsyncResult, AsyncHubResult
45 45 from .view import DirectView, LoadBalancedView
46 46
47 47 #--------------------------------------------------------------------------
48 48 # Decorators for Client methods
49 49 #--------------------------------------------------------------------------
50 50
51 51 @decorator
52 52 def spin_first(f, self, *args, **kwargs):
53 53 """Call spin() to sync state prior to calling the method."""
54 54 self.spin()
55 55 return f(self, *args, **kwargs)
56 56
57 57
58 58 #--------------------------------------------------------------------------
59 59 # Classes
60 60 #--------------------------------------------------------------------------
61 61
62 62
63 63 class ExecuteReply(RichOutput):
64 64 """wrapper for finished Execute results"""
65 65 def __init__(self, msg_id, content, metadata):
66 66 self.msg_id = msg_id
67 67 self._content = content
68 68 self.execution_count = content['execution_count']
69 69 self.metadata = metadata
70 70
71 71 # RichOutput overrides
72 72
73 73 @property
74 74 def source(self):
75 75 execute_result = self.metadata['execute_result']
76 76 if execute_result:
77 77 return execute_result.get('source', '')
78 78
79 79 @property
80 80 def data(self):
81 81 execute_result = self.metadata['execute_result']
82 82 if execute_result:
83 83 return execute_result.get('data', {})
84 84
85 85 @property
86 86 def _metadata(self):
87 87 execute_result = self.metadata['execute_result']
88 88 if execute_result:
89 89 return execute_result.get('metadata', {})
90 90
91 91 def display(self):
92 92 from IPython.display import publish_display_data
93 93 publish_display_data(self.source, self.data, self.metadata)
94 94
95 95 def _repr_mime_(self, mime):
96 96 if mime not in self.data:
97 97 return
98 98 data = self.data[mime]
99 99 if mime in self._metadata:
100 100 return data, self._metadata[mime]
101 101 else:
102 102 return data
103 103
104 104 def __getitem__(self, key):
105 105 return self.metadata[key]
106 106
107 107 def __getattr__(self, key):
108 108 if key not in self.metadata:
109 109 raise AttributeError(key)
110 110 return self.metadata[key]
111 111
112 112 def __repr__(self):
113 113 execute_result = self.metadata['execute_result'] or {'data':{}}
114 114 text_out = execute_result['data'].get('text/plain', '')
115 115 if len(text_out) > 32:
116 116 text_out = text_out[:29] + '...'
117 117
118 118 return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)
119 119
120 120 def _repr_pretty_(self, p, cycle):
121 121 execute_result = self.metadata['execute_result'] or {'data':{}}
122 122 text_out = execute_result['data'].get('text/plain', '')
123 123
124 124 if not text_out:
125 125 return
126 126
127 127 try:
128 128 ip = get_ipython()
129 129 except NameError:
130 130 colors = "NoColor"
131 131 else:
132 132 colors = ip.colors
133 133
134 134 if colors == "NoColor":
135 135 out = normal = ""
136 136 else:
137 137 out = TermColors.Red
138 138 normal = TermColors.Normal
139 139
140 140 if '\n' in text_out and not text_out.startswith('\n'):
141 141 # add newline for multiline reprs
142 142 text_out = '\n' + text_out
143 143
144 144 p.text(
145 145 out + u'Out[%i:%i]: ' % (
146 146 self.metadata['engine_id'], self.execution_count
147 147 ) + normal + text_out
148 148 )
149 149
150 150
151 151 class Metadata(dict):
152 152 """Subclass of dict for initializing metadata values.
153 153
154 154 Attribute access works on keys.
155 155
156 156 These objects have a strict set of keys - errors will raise if you try
157 157 to add new keys.
158 158 """
159 159 def __init__(self, *args, **kwargs):
160 160 dict.__init__(self)
161 161 md = {'msg_id' : None,
162 162 'submitted' : None,
163 163 'started' : None,
164 164 'completed' : None,
165 165 'received' : None,
166 166 'engine_uuid' : None,
167 167 'engine_id' : None,
168 168 'follow' : None,
169 169 'after' : None,
170 170 'status' : None,
171 171
172 172 'execute_input' : None,
173 173 'execute_result' : None,
174 174 'error' : None,
175 175 'stdout' : '',
176 176 'stderr' : '',
177 177 'outputs' : [],
178 178 'data': {},
179 179 'outputs_ready' : False,
180 180 }
181 181 self.update(md)
182 182 self.update(dict(*args, **kwargs))
183 183
184 184 def __getattr__(self, key):
185 185 """getattr aliased to getitem"""
186 186 if key in self:
187 187 return self[key]
188 188 else:
189 189 raise AttributeError(key)
190 190
191 191 def __setattr__(self, key, value):
192 192 """setattr aliased to setitem, with strict"""
193 193 if key in self:
194 194 self[key] = value
195 195 else:
196 196 raise AttributeError(key)
197 197
198 198 def __setitem__(self, key, value):
199 199 """strict static key enforcement"""
200 200 if key in self:
201 201 dict.__setitem__(self, key, value)
202 202 else:
203 203 raise KeyError(key)
204 204
205 205
206 206 class Client(HasTraits):
207 207 """A semi-synchronous client to the IPython ZMQ cluster
208 208
209 209 Parameters
210 210 ----------
211 211
212 212 url_file : str/unicode; path to ipcontroller-client.json
213 213 This JSON file should contain all the information needed to connect to a cluster,
214 214 and is likely the only argument needed.
215 215 Connection information for the Hub's registration. If a json connector
216 216 file is given, then likely no further configuration is necessary.
217 217 [Default: use profile]
218 218 profile : bytes
219 219 The name of the Cluster profile to be used to find connector information.
220 220 If run from an IPython application, the default profile will be the same
221 221 as the running application, otherwise it will be 'default'.
222 222 cluster_id : str
223 223 String id added to runtime files, to prevent name collisions when using
224 224 multiple clusters with a single profile simultaneously.
225 225 When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
226 226 Since this is text inserted into filenames, typical recommendations apply:
227 227 Simple character strings are ideal, and spaces are not recommended (but
228 228 should generally work)
229 229 context : zmq.Context
230 230 Pass an existing zmq.Context instance, otherwise the client will create its own.
231 231 debug : bool
232 232 flag for lots of message printing for debug purposes
233 233 timeout : int/float
234 234 time (in seconds) to wait for connection replies from the Hub
235 235 [Default: 10]
236 236
237 237 #-------------- session related args ----------------
238 238
239 239 config : Config object
240 240 If specified, this will be relayed to the Session for configuration
241 241 username : str
242 242 set username for the session object
243 243
244 244 #-------------- ssh related args ----------------
245 245 # These are args for configuring the ssh tunnel to be used
246 246 # credentials are used to forward connections over ssh to the Controller
247 247 # Note that the ip given in `addr` needs to be relative to sshserver
248 248 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
249 249 # and set sshserver as the same machine the Controller is on. However,
250 250 # the only requirement is that sshserver is able to see the Controller
251 251 # (i.e. is within the same trusted network).
252 252
253 253 sshserver : str
254 254 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
255 255 If keyfile or password is specified, and this is not, it will default to
256 256 the ip given in addr.
257 257 sshkey : str; path to ssh private key file
258 258 This specifies a key to be used in ssh login, default None.
259 259 Regular default ssh keys will be used without specifying this argument.
260 260 password : str
261 261 Your ssh password to sshserver. Note that if this is left None,
262 262 you will be prompted for it if passwordless key based login is unavailable.
263 263 paramiko : bool
264 264 flag for whether to use paramiko instead of shell ssh for tunneling.
265 265 [default: True on win32, False else]
266 266
267 267
268 268 Attributes
269 269 ----------
270 270
271 271 ids : list of int engine IDs
272 272 requesting the ids attribute always synchronizes
273 273 the registration state. To request ids without synchronization,
274 274 use the semi-private _ids attribute.
275 275
276 276 history : list of msg_ids
277 277 a list of msg_ids, keeping track of all the execution
278 278 messages you have submitted in order.
279 279
280 280 outstanding : set of msg_ids
281 281 a set of msg_ids that have been submitted, but whose
282 282 results have not yet been received.
283 283
284 284 results : dict
285 285 a dict of all our results, keyed by msg_id
286 286
287 287 block : bool
288 288 determines default behavior when block not specified
289 289 in execution methods
290 290
291 291 Methods
292 292 -------
293 293
294 294 spin
295 295 flushes incoming results and registration state changes
296 296 control methods spin, and requesting `ids` also ensures up to date
297 297
298 298 wait
299 299 wait on one or more msg_ids
300 300
301 301 execution methods
302 302 apply
303 303 legacy: execute, run
304 304
305 305 data movement
306 306 push, pull, scatter, gather
307 307
308 308 query methods
309 309 queue_status, get_result, purge, result_status
310 310
311 311 control methods
312 312 abort, shutdown
313 313
314 314 """
315 315
316 316
317 317 block = Bool(False)
318 318 outstanding = Set()
319 319 results = Instance('collections.defaultdict', (dict,))
320 320 metadata = Instance('collections.defaultdict', (Metadata,))
321 321 history = List()
322 322 debug = Bool(False)
323 323 _spin_thread = Any()
324 324 _stop_spinning = Any()
325 325
326 326 profile=Unicode()
327 327 def _profile_default(self):
328 328 if BaseIPythonApplication.initialized():
329 329 # an IPython app *might* be running, try to get its profile
330 330 try:
331 331 return BaseIPythonApplication.instance().profile
332 332 except (AttributeError, MultipleInstanceError):
333 333 # could be a *different* subclass of config.Application,
334 334 # which would raise one of these two errors.
335 335 return u'default'
336 336 else:
337 337 return u'default'
338 338
339 339
340 340 _outstanding_dict = Instance('collections.defaultdict', (set,))
341 341 _ids = List()
342 342 _connected=Bool(False)
343 343 _ssh=Bool(False)
344 344 _context = Instance('zmq.Context')
345 345 _config = Dict()
346 346 _engines=Instance(util.ReverseDict, (), {})
347 347 # _hub_socket=Instance('zmq.Socket')
348 348 _query_socket=Instance('zmq.Socket')
349 349 _control_socket=Instance('zmq.Socket')
350 350 _iopub_socket=Instance('zmq.Socket')
351 351 _notification_socket=Instance('zmq.Socket')
352 352 _mux_socket=Instance('zmq.Socket')
353 353 _task_socket=Instance('zmq.Socket')
354 354 _task_scheme=Unicode()
355 355 _closed = False
356 356 _ignored_control_replies=Integer(0)
357 357 _ignored_hub_replies=Integer(0)
358 358
359 359 def __new__(self, *args, **kw):
360 360 # don't raise on positional args
361 361 return HasTraits.__new__(self, **kw)
362 362
363 363 def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
364 364 context=None, debug=False,
365 365 sshserver=None, sshkey=None, password=None, paramiko=None,
366 366 timeout=10, cluster_id=None, **extra_args
367 367 ):
368 368 if profile:
369 369 super(Client, self).__init__(debug=debug, profile=profile)
370 370 else:
371 371 super(Client, self).__init__(debug=debug)
372 372 if context is None:
373 373 context = zmq.Context.instance()
374 374 self._context = context
375 375 self._stop_spinning = Event()
376 376
377 377 if 'url_or_file' in extra_args:
378 378 url_file = extra_args['url_or_file']
379 379 warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)
380 380
381 381 if url_file and util.is_url(url_file):
382 382 raise ValueError("single urls cannot be specified, url-files must be used.")
383 383
384 384 self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
385 385
386 386 if self._cd is not None:
387 387 if url_file is None:
388 388 if not cluster_id:
389 389 client_json = 'ipcontroller-client.json'
390 390 else:
391 391 client_json = 'ipcontroller-%s-client.json' % cluster_id
392 392 url_file = pjoin(self._cd.security_dir, client_json)
393 393 if url_file is None:
394 394 raise ValueError(
395 395 "I can't find enough information to connect to a hub!"
396 396 " Please specify at least one of url_file or profile."
397 397 )
398 398
399 399 with open(url_file) as f:
400 400 cfg = json.load(f)
401 401
402 402 self._task_scheme = cfg['task_scheme']
403 403
404 404 # sync defaults from args, json:
405 405 if sshserver:
406 406 cfg['ssh'] = sshserver
407 407
408 408 location = cfg.setdefault('location', None)
409 409
410 410 proto,addr = cfg['interface'].split('://')
411 411 addr = util.disambiguate_ip_address(addr, location)
412 412 cfg['interface'] = "%s://%s" % (proto, addr)
413 413
414 414 # turn interface,port into full urls:
415 415 for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
416 416 cfg[key] = cfg['interface'] + ':%i' % cfg[key]
417 417
418 418 url = cfg['registration']
419 419
420 420 if location is not None and addr == localhost():
421 421 # location specified, and connection is expected to be local
422 422 if not is_local_ip(location) and not sshserver:
423 423 # load ssh from JSON *only* if the controller is not on
424 424 # this machine
425 425 sshserver=cfg['ssh']
426 426 if not is_local_ip(location) and not sshserver:
427 427 # warn if no ssh specified, but SSH is probably needed
428 428 # This is only a warning, because the most likely cause
429 429 # is a local Controller on a laptop whose IP is dynamic
430 430 warnings.warn("""
431 431 Controller appears to be listening on localhost, but not on this machine.
432 432 If this is true, you should specify Client(...,sshserver='you@%s')
433 433 or instruct your controller to listen on an external IP."""%location,
434 434 RuntimeWarning)
435 435 elif not sshserver:
436 436 # otherwise sync with cfg
437 437 sshserver = cfg['ssh']
438 438
439 439 self._config = cfg
440 440
441 441 self._ssh = bool(sshserver or sshkey or password)
442 442 if self._ssh and sshserver is None:
443 443 # default to ssh via localhost
444 444 sshserver = addr
445 445 if self._ssh and password is None:
446 446 if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
447 447 password=False
448 448 else:
449 449 password = getpass("SSH Password for %s: "%sshserver)
450 450 ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
451 451
452 452 # configure and construct the session
453 453 try:
454 454 extra_args['packer'] = cfg['pack']
455 455 extra_args['unpacker'] = cfg['unpack']
456 456 extra_args['key'] = cast_bytes(cfg['key'])
457 457 extra_args['signature_scheme'] = cfg['signature_scheme']
458 458 except KeyError as exc:
459 459 msg = '\n'.join([
460 460 "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
461 461 "If you are reusing connection files, remove them and start ipcontroller again."
462 462 ])
463 463 raise ValueError(msg.format(exc.message))
464 464
465 465 self.session = Session(**extra_args)
466 466
467 467 self._query_socket = self._context.socket(zmq.DEALER)
468 468
469 469 if self._ssh:
470 470 tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
471 471 else:
472 472 self._query_socket.connect(cfg['registration'])
473 473
474 474 self.session.debug = self.debug
475 475
476 476 self._notification_handlers = {'registration_notification' : self._register_engine,
477 477 'unregistration_notification' : self._unregister_engine,
478 478 'shutdown_notification' : lambda msg: self.close(),
479 479 }
480 480 self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
481 481 'apply_reply' : self._handle_apply_reply}
482 482
483 483 try:
484 484 self._connect(sshserver, ssh_kwargs, timeout)
485 485 except:
486 486 self.close(linger=0)
487 487 raise
488 488
489 489 # last step: setup magics, if we are in IPython:
490 490
491 491 try:
492 492 ip = get_ipython()
493 493 except NameError:
494 494 return
495 495 else:
496 496 if 'px' not in ip.magics_manager.magics:
497 497 # in IPython but we are the first Client.
498 498 # activate a default view for parallel magics.
499 499 self.activate()
500 500
501 501 def __del__(self):
502 502 """cleanup sockets, but _not_ context."""
503 503 self.close()
504 504
505 505 def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
506 506 if ipython_dir is None:
507 507 ipython_dir = get_ipython_dir()
508 508 if profile_dir is not None:
509 509 try:
510 510 self._cd = ProfileDir.find_profile_dir(profile_dir)
511 511 return
512 512 except ProfileDirError:
513 513 pass
514 514 elif profile is not None:
515 515 try:
516 516 self._cd = ProfileDir.find_profile_dir_by_name(
517 517 ipython_dir, profile)
518 518 return
519 519 except ProfileDirError:
520 520 pass
521 521 self._cd = None
522 522
523 523 def _update_engines(self, engines):
524 524 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
525 525 for k,v in iteritems(engines):
526 526 eid = int(k)
527 527 if eid not in self._engines:
528 528 self._ids.append(eid)
529 529 self._engines[eid] = v
530 530 self._ids = sorted(self._ids)
531 531 if sorted(self._engines.keys()) != list(range(len(self._engines))) and \
532 532 self._task_scheme == 'pure' and self._task_socket:
533 533 self._stop_scheduling_tasks()
534 534
535 535 def _stop_scheduling_tasks(self):
536 536 """Stop scheduling tasks because an engine has been unregistered
537 537 from a pure ZMQ scheduler.
538 538 """
539 539 self._task_socket.close()
540 540 self._task_socket = None
541 541 msg = "An engine has been unregistered, and we are using pure " +\
542 542 "ZMQ task scheduling. Task farming will be disabled."
543 543 if self.outstanding:
544 544 msg += " If you were running tasks when this happened, " +\
545 545 "some `outstanding` msg_ids may never resolve."
546 546 warnings.warn(msg, RuntimeWarning)
547 547
548 548 def _build_targets(self, targets):
549 549 """Turn valid target IDs or 'all' into two lists:
550 550 (int_ids, uuids).
551 551 """
552 552 if not self._ids:
553 553 # flush notification socket if no engines yet, just in case
554 554 if not self.ids:
555 555 raise error.NoEnginesRegistered("Can't build targets without any engines")
556 556
557 557 if targets is None:
558 558 targets = self._ids
559 559 elif isinstance(targets, string_types):
560 560 if targets.lower() == 'all':
561 561 targets = self._ids
562 562 else:
563 563 raise TypeError("%r not valid str target, must be 'all'"%(targets))
564 564 elif isinstance(targets, int):
565 565 if targets < 0:
566 566 targets = self.ids[targets]
567 567 if targets not in self._ids:
568 568 raise IndexError("No such engine: %i"%targets)
569 569 targets = [targets]
570 570
571 571 if isinstance(targets, slice):
572 572 indices = list(range(len(self._ids))[targets])
573 573 ids = self.ids
574 574 targets = [ ids[i] for i in indices ]
575 575
576 576 if not isinstance(targets, (tuple, list, xrange)):
577 577 raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))
578 578
579 579 return [cast_bytes(self._engines[t]) for t in targets], list(targets)
580 580
581 581 def _connect(self, sshserver, ssh_kwargs, timeout):
582 582 """setup all our socket connections to the cluster. This is called from
583 583 __init__."""
584 584
585 585 # Maybe allow reconnecting?
586 586 if self._connected:
587 587 return
588 588 self._connected=True
589 589
590 590 def connect_socket(s, url):
591 591 if self._ssh:
592 592 return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
593 593 else:
594 594 return s.connect(url)
595 595
596 596 self.session.send(self._query_socket, 'connection_request')
597 597 # use Poller because zmq.select has wrong units in pyzmq 2.1.7
598 598 poller = zmq.Poller()
599 599 poller.register(self._query_socket, zmq.POLLIN)
600 600 # poll expects milliseconds, timeout is seconds
601 601 evts = poller.poll(timeout*1000)
602 602 if not evts:
603 603 raise error.TimeoutError("Hub connection request timed out")
604 604 idents,msg = self.session.recv(self._query_socket,mode=0)
605 605 if self.debug:
606 606 pprint(msg)
607 607 content = msg['content']
608 608 # self._config['registration'] = dict(content)
609 609 cfg = self._config
610 610 if content['status'] == 'ok':
611 611 self._mux_socket = self._context.socket(zmq.DEALER)
612 612 connect_socket(self._mux_socket, cfg['mux'])
613 613
614 614 self._task_socket = self._context.socket(zmq.DEALER)
615 615 connect_socket(self._task_socket, cfg['task'])
616 616
617 617 self._notification_socket = self._context.socket(zmq.SUB)
618 618 self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
619 619 connect_socket(self._notification_socket, cfg['notification'])
620 620
621 621 self._control_socket = self._context.socket(zmq.DEALER)
622 622 connect_socket(self._control_socket, cfg['control'])
623 623
624 624 self._iopub_socket = self._context.socket(zmq.SUB)
625 625 self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
626 626 connect_socket(self._iopub_socket, cfg['iopub'])
627 627
628 628 self._update_engines(dict(content['engines']))
629 629 else:
630 630 self._connected = False
631 631 raise Exception("Failed to connect!")
632 632
633 633 #--------------------------------------------------------------------------
634 634 # handlers and callbacks for incoming messages
635 635 #--------------------------------------------------------------------------
636 636
637 637 def _unwrap_exception(self, content):
638 638 """unwrap exception, and remap engine_id to int."""
639 639 e = error.unwrap_exception(content)
640 640 # print e.traceback
641 641 if e.engine_info:
642 642 e_uuid = e.engine_info['engine_uuid']
643 643 eid = self._engines[e_uuid]
644 644 e.engine_info['engine_id'] = eid
645 645 return e
646 646
647 647 def _extract_metadata(self, msg):
648 648 header = msg['header']
649 649 parent = msg['parent_header']
650 650 msg_meta = msg['metadata']
651 651 content = msg['content']
652 652 md = {'msg_id' : parent['msg_id'],
653 653 'received' : datetime.now(),
654 654 'engine_uuid' : msg_meta.get('engine', None),
655 655 'follow' : msg_meta.get('follow', []),
656 656 'after' : msg_meta.get('after', []),
657 657 'status' : content['status'],
658 658 }
659 659
660 660 if md['engine_uuid'] is not None:
661 661 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
662 662
663 663 if 'date' in parent:
664 664 md['submitted'] = parent['date']
665 665 if 'started' in msg_meta:
666 666 md['started'] = parse_date(msg_meta['started'])
667 667 if 'date' in header:
668 668 md['completed'] = header['date']
669 669 return md
670 670
671 671 def _register_engine(self, msg):
672 672 """Register a new engine, and update our connection info."""
673 673 content = msg['content']
674 674 eid = content['id']
675 675 d = {eid : content['uuid']}
676 676 self._update_engines(d)
677 677
678 678 def _unregister_engine(self, msg):
679 679 """Unregister an engine that has died."""
680 680 content = msg['content']
681 681 eid = int(content['id'])
682 682 if eid in self._ids:
683 683 self._ids.remove(eid)
684 684 uuid = self._engines.pop(eid)
685 685
686 686 self._handle_stranded_msgs(eid, uuid)
687 687
688 688 if self._task_socket and self._task_scheme == 'pure':
689 689 self._stop_scheduling_tasks()
690 690
691 691 def _handle_stranded_msgs(self, eid, uuid):
692 692 """Handle messages known to be on an engine when the engine unregisters.
693 693
694 694 It is possible that this will fire prematurely - that is, an engine will
695 695 go down after completing a result, and the client will be notified
696 696 of the unregistration and later receive the successful result.
697 697 """
698 698
699 699 outstanding = self._outstanding_dict[uuid]
700 700
701 701 for msg_id in list(outstanding):
702 702 if msg_id in self.results:
703 703 # we already have this result; nothing to do
704 704 continue
705 705 try:
706 706 raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
707 707 except:
708 708 content = error.wrap_exception()
709 709 # build a fake message:
710 710 msg = self.session.msg('apply_reply', content=content)
711 711 msg['parent_header']['msg_id'] = msg_id
712 712 msg['metadata']['engine'] = uuid
713 713 self._handle_apply_reply(msg)
714 714
715 715 def _handle_execute_reply(self, msg):
716 716 """Save the reply to an execute_request into our results.
717 717
718 718 execute messages are never actually used. apply is used instead.
719 719 """
720 720
721 721 parent = msg['parent_header']
722 722 msg_id = parent['msg_id']
723 723 if msg_id not in self.outstanding:
724 724 if msg_id in self.history:
725 725 print("got stale result: %s"%msg_id)
726 726 else:
727 727 print("got unknown result: %s"%msg_id)
728 728 else:
729 729 self.outstanding.remove(msg_id)
730 730
731 731 content = msg['content']
732 732 header = msg['header']
733 733
734 734 # construct metadata:
735 735 md = self.metadata[msg_id]
736 736 md.update(self._extract_metadata(msg))
737 737 # is this redundant?
738 738 self.metadata[msg_id] = md
739 739
740 740 e_outstanding = self._outstanding_dict[md['engine_uuid']]
741 741 if msg_id in e_outstanding:
742 742 e_outstanding.remove(msg_id)
743 743
744 744 # construct result:
745 745 if content['status'] == 'ok':
746 746 self.results[msg_id] = ExecuteReply(msg_id, content, md)
747 747 elif content['status'] == 'aborted':
748 748 self.results[msg_id] = error.TaskAborted(msg_id)
749 749 elif content['status'] == 'resubmitted':
750 750 # TODO: handle resubmission
751 751 pass
752 752 else:
753 753 self.results[msg_id] = self._unwrap_exception(content)
754 754
755 755 def _handle_apply_reply(self, msg):
756 756 """Save the reply to an apply_request into our results."""
757 757 parent = msg['parent_header']
758 758 msg_id = parent['msg_id']
759 759 if msg_id not in self.outstanding:
760 760 if msg_id in self.history:
761 761 print("got stale result: %s"%msg_id)
762 762 print(self.results[msg_id])
763 763 print(msg)
764 764 else:
765 765 print("got unknown result: %s"%msg_id)
766 766 else:
767 767 self.outstanding.remove(msg_id)
768 768 content = msg['content']
769 769 header = msg['header']
770 770
771 771 # construct metadata:
772 772 md = self.metadata[msg_id]
773 773 md.update(self._extract_metadata(msg))
774 774 # is this redundant?
775 775 self.metadata[msg_id] = md
776 776
777 777 e_outstanding = self._outstanding_dict[md['engine_uuid']]
778 778 if msg_id in e_outstanding:
779 779 e_outstanding.remove(msg_id)
780 780
781 781 # construct result:
782 782 if content['status'] == 'ok':
783 783 self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
784 784 elif content['status'] == 'aborted':
785 785 self.results[msg_id] = error.TaskAborted(msg_id)
786 786 elif content['status'] == 'resubmitted':
787 787 # TODO: handle resubmission
788 788 pass
789 789 else:
790 790 self.results[msg_id] = self._unwrap_exception(content)
791 791
792 792 def _flush_notifications(self):
793 793 """Flush notifications of engine registrations waiting
794 794 in ZMQ queue."""
795 795 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
796 796 while msg is not None:
797 797 if self.debug:
798 798 pprint(msg)
799 799 msg_type = msg['header']['msg_type']
800 800 handler = self._notification_handlers.get(msg_type, None)
801 801 if handler is None:
802 802 raise Exception("Unhandled message type: %s" % msg_type)
803 803 else:
804 804 handler(msg)
805 805 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
806 806
807 807 def _flush_results(self, sock):
808 808 """Flush task or queue results waiting in ZMQ queue."""
809 809 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
810 810 while msg is not None:
811 811 if self.debug:
812 812 pprint(msg)
813 813 msg_type = msg['header']['msg_type']
814 814 handler = self._queue_handlers.get(msg_type, None)
815 815 if handler is None:
816 816 raise Exception("Unhandled message type: %s" % msg_type)
817 817 else:
818 818 handler(msg)
819 819 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
820 820
821 821 def _flush_control(self, sock):
822 822 """Flush replies from the control channel waiting
823 823 in the ZMQ queue.
824 824
825 825 Currently: ignore them."""
826 826 if self._ignored_control_replies <= 0:
827 827 return
828 828 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
829 829 while msg is not None:
830 830 self._ignored_control_replies -= 1
831 831 if self.debug:
832 832 pprint(msg)
833 833 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
834 834
835 835 def _flush_ignored_control(self):
836 836 """flush ignored control replies"""
837 837 while self._ignored_control_replies > 0:
838 838 self.session.recv(self._control_socket)
839 839 self._ignored_control_replies -= 1
840 840
841 841 def _flush_ignored_hub_replies(self):
842 842 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
843 843 while msg is not None:
844 844 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
845 845
846 846 def _flush_iopub(self, sock):
847 847 """Flush replies from the iopub channel waiting
848 848 in the ZMQ queue.
849 849 """
850 850 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
851 851 while msg is not None:
852 852 if self.debug:
853 853 pprint(msg)
854 854 parent = msg['parent_header']
855 855 # ignore IOPub messages with no parent.
856 856 # Caused by print statements or warnings from before the first execution.
857 857 if not parent:
858 858 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
859 859 continue
860 860 msg_id = parent['msg_id']
861 861 content = msg['content']
862 862 header = msg['header']
863 863 msg_type = msg['header']['msg_type']
864 864
865 865 # init metadata:
866 866 md = self.metadata[msg_id]
867 867
868 868 if msg_type == 'stream':
869 869 name = content['name']
870 870 s = md[name] or ''
871 871 md[name] = s + content['data']
872 872 elif msg_type == 'error':
873 873 md.update({'error' : self._unwrap_exception(content)})
874 874 elif msg_type == 'execute_input':
875 875 md.update({'execute_input' : content['code']})
876 876 elif msg_type == 'display_data':
877 877 md['outputs'].append(content)
878 878 elif msg_type == 'execute_result':
879 879 md['execute_result'] = content
880 880 elif msg_type == 'data_message':
881 881 data, remainder = serialize.unserialize_object(msg['buffers'])
882 882 md['data'].update(data)
883 883 elif msg_type == 'status':
884 884 # idle message comes after all outputs
885 885 if content['execution_state'] == 'idle':
886 886 md['outputs_ready'] = True
887 887 else:
888 888 # unhandled msg_type
889 889 pass
890 890
891 891 # redundant?
892 892 self.metadata[msg_id] = md
893 893
894 894 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
895 895
896 896 #--------------------------------------------------------------------------
897 897 # len, getitem
898 898 #--------------------------------------------------------------------------
899 899
900 900 def __len__(self):
901 901 """len(client) returns # of engines."""
902 902 return len(self.ids)
903 903
904 904 def __getitem__(self, key):
905 905 """index access returns DirectView multiplexer objects
906 906
907 907 Must be int, slice, or list/tuple/xrange of ints"""
908 908 if not isinstance(key, (int, slice, tuple, list, xrange)):
909 909 raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
910 910 else:
911 911 return self.direct_view(key)
912 912
913 913 def __iter__(self):
914 914 """Since we define getitem, Client is iterable
915 915
916 916 but unless we also define __iter__, it won't work correctly unless engine IDs
917 917 start at zero and are continuous.
918 918 """
919 919 for eid in self.ids:
920 920 yield self.direct_view(eid)
921 921
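# Example sketch: index access and iteration on a Client, assuming a running
# cluster is reachable (e.g. started with `ipcluster start -n 4`).
from IPython.parallel import Client

rc = Client()
e0 = rc[0]            # DirectView on engine 0
evens = rc[::2]       # DirectView on every other engine
views = list(rc)      # one single-engine DirectView per registered engine id
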
922 922 #--------------------------------------------------------------------------
923 923 # Begin public methods
924 924 #--------------------------------------------------------------------------
925 925
926 926 @property
927 927 def ids(self):
928 928 """Always up-to-date ids property."""
929 929 self._flush_notifications()
930 930 # always copy:
931 931 return list(self._ids)
932 932
933 933 def activate(self, targets='all', suffix=''):
934 934 """Create a DirectView and register it with IPython magics
935 935
936 936 Defines the magics `%px, %autopx, %pxresult, %%px`
937 937
938 938 Parameters
939 939 ----------
940 940
941 941 targets: int, list of ints, or 'all'
942 942 The engines on which the view's magics will run
943 943 suffix: str [default: '']
944 944 The suffix, if any, for the magics. This allows you to have
945 945 multiple views associated with parallel magics at the same time.
946 946
947 947 e.g. ``rc.activate(targets=0, suffix='0')`` will give you
948 948 the magics ``%px0``, ``%pxresult0``, etc. for running magics just
949 949 on engine 0.
950 950 """
951 951 view = self.direct_view(targets)
952 952 view.block = True
953 953 view.activate(suffix)
954 954 return view
955 955
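# Example sketch: register the parallel magics for an interactive IPython
# session (assumes a running cluster and that this code runs inside IPython).
from IPython.parallel import Client

rc = Client()
rc.activate()                       # defines %px, %autopx, %pxresult, %%px
rc.activate(targets=0, suffix='0')  # %px0 etc. run only on engine 0
# inside IPython one could now run, e.g.:  %px import numpy
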
956 956 def close(self, linger=None):
957 957 """Close my zmq Sockets
958 958
959 959 If `linger`, set the zmq LINGER socket option,
960 960 which allows discarding of messages.
961 961 """
962 962 if self._closed:
963 963 return
964 964 self.stop_spin_thread()
965 965 snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
966 966 for name in snames:
967 967 socket = getattr(self, name)
968 968 if socket is not None and not socket.closed:
969 969 if linger is not None:
970 970 socket.close(linger=linger)
971 971 else:
972 972 socket.close()
973 973 self._closed = True
974 974
975 975 def _spin_every(self, interval=1):
976 976 """target func for use in spin_thread"""
977 977 while True:
978 978 if self._stop_spinning.is_set():
979 979 return
980 980 time.sleep(interval)
981 981 self.spin()
982 982
983 983 def spin_thread(self, interval=1):
984 984 """call Client.spin() in a background thread on some regular interval
985 985
986 986 This helps ensure that messages don't pile up too much in the zmq queue
987 987 while you are working on other things, or just leaving an idle terminal.
988 988
989 989 It also helps limit potential padding of the `received` timestamp
990 990 on AsyncResult objects, used for timings.
991 991
992 992 Parameters
993 993 ----------
994 994
995 995 interval : float, optional
996 996 The interval on which to spin the client in the background thread
997 997 (simply passed to time.sleep).
998 998
999 999 Notes
1000 1000 -----
1001 1001
1002 1002 For precision timing, you may want to use this method to put a bound
1003 1003 on the jitter (in seconds) in `received` timestamps used
1004 1004 in AsyncResult.wall_time.
1005 1005
1006 1006 """
1007 1007 if self._spin_thread is not None:
1008 1008 self.stop_spin_thread()
1009 1009 self._stop_spinning.clear()
1010 1010 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
1011 1011 self._spin_thread.daemon = True
1012 1012 self._spin_thread.start()
1013 1013
1014 1014 def stop_spin_thread(self):
1015 1015 """stop background spin_thread, if any"""
1016 1016 if self._spin_thread is not None:
1017 1017 self._stop_spinning.set()
1018 1018 self._spin_thread.join()
1019 1019 self._spin_thread = None
1020 1020
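# Example sketch: spin the client in a background thread so queued replies are
# flushed (and `received` timestamps stay accurate) while doing other work.
from IPython.parallel import Client

rc = Client()
rc.spin_thread(interval=0.5)   # flush every 0.5s in the background
# ... submit and monitor work here ...
rc.stop_spin_thread()          # stop the background thread when done
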
1021 1021 def spin(self):
1022 1022 """Flush any registration notifications and execution results
1023 1023 waiting in the ZMQ queue.
1024 1024 """
1025 1025 if self._notification_socket:
1026 1026 self._flush_notifications()
1027 1027 if self._iopub_socket:
1028 1028 self._flush_iopub(self._iopub_socket)
1029 1029 if self._mux_socket:
1030 1030 self._flush_results(self._mux_socket)
1031 1031 if self._task_socket:
1032 1032 self._flush_results(self._task_socket)
1033 1033 if self._control_socket:
1034 1034 self._flush_control(self._control_socket)
1035 1035 if self._query_socket:
1036 1036 self._flush_ignored_hub_replies()
1037 1037
1038 1038 def wait(self, jobs=None, timeout=-1):
1039 1039 """waits on one or more `jobs`, for up to `timeout` seconds.
1040 1040
1041 1041 Parameters
1042 1042 ----------
1043 1043
1044 1044 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
1045 1045 ints are indices to self.history
1046 1046 strs are msg_ids
1047 1047 default: wait on all outstanding messages
1048 1048 timeout : float
1049 1049 a time in seconds, after which to give up.
1050 1050 default is -1, which means no timeout
1051 1051
1052 1052 Returns
1053 1053 -------
1054 1054
1055 1055 True : when all msg_ids are done
1056 1056 False : timeout reached, some msg_ids still outstanding
1057 1057 """
1058 1058 tic = time.time()
1059 1059 if jobs is None:
1060 1060 theids = self.outstanding
1061 1061 else:
1062 1062 if isinstance(jobs, string_types + (int, AsyncResult)):
1063 1063 jobs = [jobs]
1064 1064 theids = set()
1065 1065 for job in jobs:
1066 1066 if isinstance(job, int):
1067 1067 # index access
1068 1068 job = self.history[job]
1069 1069 elif isinstance(job, AsyncResult):
1070 1070 theids.update(job.msg_ids)
1071 1071 continue
1072 1072 theids.add(job)
1073 1073 if not theids.intersection(self.outstanding):
1074 1074 return True
1075 1075 self.spin()
1076 1076 while theids.intersection(self.outstanding):
1077 1077 if timeout >= 0 and ( time.time()-tic ) > timeout:
1078 1078 break
1079 1079 time.sleep(1e-3)
1080 1080 self.spin()
1081 1081 return len(theids.intersection(self.outstanding)) == 0
1082 1082
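# Example sketch: block until specific jobs finish, with a timeout, assuming
# a connected Client on a running cluster.
import os
from IPython.parallel import Client

rc = Client()
ar = rc[:].apply_async(os.getpid)    # AsyncResult, one msg_id per engine
done = rc.wait([ar], timeout=5)      # True if everything finished within 5s
if not done:
    print("still outstanding:", rc.outstanding)
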
1083 1083 #--------------------------------------------------------------------------
1084 1084 # Control methods
1085 1085 #--------------------------------------------------------------------------
1086 1086
1087 1087 @spin_first
1088 1088 def clear(self, targets=None, block=None):
1089 1089 """Clear the namespace in target(s)."""
1090 1090 block = self.block if block is None else block
1091 1091 targets = self._build_targets(targets)[0]
1092 1092 for t in targets:
1093 1093 self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
1094 1094 error = False
1095 1095 if block:
1096 1096 self._flush_ignored_control()
1097 1097 for i in range(len(targets)):
1098 1098 idents,msg = self.session.recv(self._control_socket,0)
1099 1099 if self.debug:
1100 1100 pprint(msg)
1101 1101 if msg['content']['status'] != 'ok':
1102 1102 error = self._unwrap_exception(msg['content'])
1103 1103 else:
1104 1104 self._ignored_control_replies += len(targets)
1105 1105 if error:
1106 1106 raise error
1107 1107
1108 1108
1109 1109 @spin_first
1110 1110 def abort(self, jobs=None, targets=None, block=None):
1111 1111 """Abort specific jobs from the execution queues of target(s).
1112 1112
1113 1113 This is a mechanism to prevent jobs that have already been submitted
1114 1114 from executing.
1115 1115
1116 1116 Parameters
1117 1117 ----------
1118 1118
1119 1119 jobs : msg_id, list of msg_ids, or AsyncResult
1120 1120 The jobs to be aborted
1121 1121
1122 1122 If unspecified/None: abort all outstanding jobs.
1123 1123
1124 1124 """
1125 1125 block = self.block if block is None else block
1126 1126 jobs = jobs if jobs is not None else list(self.outstanding)
1127 1127 targets = self._build_targets(targets)[0]
1128 1128
1129 1129 msg_ids = []
1130 1130 if isinstance(jobs, string_types + (AsyncResult,)):
1131 1131 jobs = [jobs]
1132 1132 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1133 1133 if bad_ids:
1134 1134 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1135 1135 for j in jobs:
1136 1136 if isinstance(j, AsyncResult):
1137 1137 msg_ids.extend(j.msg_ids)
1138 1138 else:
1139 1139 msg_ids.append(j)
1140 1140 content = dict(msg_ids=msg_ids)
1141 1141 for t in targets:
1142 1142 self.session.send(self._control_socket, 'abort_request',
1143 1143 content=content, ident=t)
1144 1144 error = False
1145 1145 if block:
1146 1146 self._flush_ignored_control()
1147 1147 for i in range(len(targets)):
1148 1148 idents,msg = self.session.recv(self._control_socket,0)
1149 1149 if self.debug:
1150 1150 pprint(msg)
1151 1151 if msg['content']['status'] != 'ok':
1152 1152 error = self._unwrap_exception(msg['content'])
1153 1153 else:
1154 1154 self._ignored_control_replies += len(targets)
1155 1155 if error:
1156 1156 raise error
1157 1157
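# Example sketch: abort jobs that are still waiting in the engines' queues.
import time
from IPython.parallel import Client

rc = Client()
dv = rc[:]
ars = [dv.apply_async(time.sleep, 5) for _ in range(10)]
rc.abort(jobs=ars[1:], block=True)   # abort everything not yet running
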
1158 1158 @spin_first
1159 1159 def shutdown(self, targets='all', restart=False, hub=False, block=None):
1160 1160 """Terminates one or more engine processes, optionally including the hub.
1161 1161
1162 1162 Parameters
1163 1163 ----------
1164 1164
1165 1165 targets: list of ints or 'all' [default: all]
1166 1166 Which engines to shutdown.
1167 1167 hub: bool [default: False]
1168 1168 Whether to include the Hub. hub=True implies targets='all'.
1169 1169 block: bool [default: self.block]
1170 1170 Whether to wait for clean shutdown replies or not.
1171 1171 restart: bool [default: False]
1172 1172 NOT IMPLEMENTED
1173 1173 whether to restart engines after shutting them down.
1174 1174 """
1175 1175 from IPython.parallel.error import NoEnginesRegistered
1176 1176 if restart:
1177 1177 raise NotImplementedError("Engine restart is not yet implemented")
1178 1178
1179 1179 block = self.block if block is None else block
1180 1180 if hub:
1181 1181 targets = 'all'
1182 1182 try:
1183 1183 targets = self._build_targets(targets)[0]
1184 1184 except NoEnginesRegistered:
1185 1185 targets = []
1186 1186 for t in targets:
1187 1187 self.session.send(self._control_socket, 'shutdown_request',
1188 1188 content={'restart':restart},ident=t)
1189 1189 error = False
1190 1190 if block or hub:
1191 1191 self._flush_ignored_control()
1192 1192 for i in range(len(targets)):
1193 1193 idents,msg = self.session.recv(self._control_socket, 0)
1194 1194 if self.debug:
1195 1195 pprint(msg)
1196 1196 if msg['content']['status'] != 'ok':
1197 1197 error = self._unwrap_exception(msg['content'])
1198 1198 else:
1199 1199 self._ignored_control_replies += len(targets)
1200 1200
1201 1201 if hub:
1202 1202 time.sleep(0.25)
1203 1203 self.session.send(self._query_socket, 'shutdown_request')
1204 1204 idents,msg = self.session.recv(self._query_socket, 0)
1205 1205 if self.debug:
1206 1206 pprint(msg)
1207 1207 if msg['content']['status'] != 'ok':
1208 1208 error = self._unwrap_exception(msg['content'])
1209 1209
1210 1210 if error:
1211 1211 raise error
1212 1212
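# Example sketch: shut down engines, and optionally the hub as well.
from IPython.parallel import Client

rc = Client()
rc.shutdown(targets=[0, 1], block=True)   # stop just engines 0 and 1
# rc.shutdown(hub=True)                   # stop all engines and the hub
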
1213 1213 #--------------------------------------------------------------------------
1214 1214 # Execution related methods
1215 1215 #--------------------------------------------------------------------------
1216 1216
1217 1217 def _maybe_raise(self, result):
1218 1218 """wrapper for maybe raising an exception if apply failed."""
1219 1219 if isinstance(result, error.RemoteError):
1220 1220 raise result
1221 1221
1222 1222 return result
1223 1223
1224 1224 def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
1225 1225 ident=None):
1226 1226 """construct and send an apply message via a socket.
1227 1227
1228 1228 This is the principal method with which all engine execution is performed by views.
1229 1229 """
1230 1230
1231 1231 if self._closed:
1232 1232 raise RuntimeError("Client cannot be used after its sockets have been closed")
1233 1233
1234 1234 # defaults:
1235 1235 args = args if args is not None else []
1236 1236 kwargs = kwargs if kwargs is not None else {}
1237 1237 metadata = metadata if metadata is not None else {}
1238 1238
1239 1239 # validate arguments
1240 1240 if not callable(f) and not isinstance(f, Reference):
1241 1241 raise TypeError("f must be callable, not %s"%type(f))
1242 1242 if not isinstance(args, (tuple, list)):
1243 1243 raise TypeError("args must be tuple or list, not %s"%type(args))
1244 1244 if not isinstance(kwargs, dict):
1245 1245 raise TypeError("kwargs must be dict, not %s"%type(kwargs))
1246 1246 if not isinstance(metadata, dict):
1247 1247 raise TypeError("metadata must be dict, not %s"%type(metadata))
1248 1248
1249 1249 bufs = serialize.pack_apply_message(f, args, kwargs,
1250 1250 buffer_threshold=self.session.buffer_threshold,
1251 1251 item_threshold=self.session.item_threshold,
1252 1252 )
1253 1253
1254 1254 msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
1255 1255 metadata=metadata, track=track)
1256 1256
1257 1257 msg_id = msg['header']['msg_id']
1258 1258 self.outstanding.add(msg_id)
1259 1259 if ident:
1260 1260 # possibly routed to a specific engine
1261 1261 if isinstance(ident, list):
1262 1262 ident = ident[-1]
1263 1263 if ident in self._engines.values():
1264 1264 # save for later, in case of engine death
1265 1265 self._outstanding_dict[ident].add(msg_id)
1266 1266 self.history.append(msg_id)
1267 1267 self.metadata[msg_id]['submitted'] = datetime.now()
1268 1268
1269 1269 return msg
1270 1270
1271 1271 def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
1272 1272 """construct and send an execute request via a socket.
1273 1273
1274 1274 """
1275 1275
1276 1276 if self._closed:
1277 1277 raise RuntimeError("Client cannot be used after its sockets have been closed")
1278 1278
1279 1279 # defaults:
1280 1280 metadata = metadata if metadata is not None else {}
1281 1281
1282 1282 # validate arguments
1283 1283 if not isinstance(code, string_types):
1284 1284 raise TypeError("code must be text, not %s" % type(code))
1285 1285 if not isinstance(metadata, dict):
1286 1286 raise TypeError("metadata must be dict, not %s" % type(metadata))
1287 1287
1288 content = dict(code=code, silent=bool(silent), user_variables=[], user_expressions={})
1288 content = dict(code=code, silent=bool(silent), user_expressions={})
1289 1289
1290 1290
1291 1291 msg = self.session.send(socket, "execute_request", content=content, ident=ident,
1292 1292 metadata=metadata)
1293 1293
1294 1294 msg_id = msg['header']['msg_id']
1295 1295 self.outstanding.add(msg_id)
1296 1296 if ident:
1297 1297 # possibly routed to a specific engine
1298 1298 if isinstance(ident, list):
1299 1299 ident = ident[-1]
1300 1300 if ident in self._engines.values():
1301 1301 # save for later, in case of engine death
1302 1302 self._outstanding_dict[ident].add(msg_id)
1303 1303 self.history.append(msg_id)
1304 1304 self.metadata[msg_id]['submitted'] = datetime.now()
1305 1305
1306 1306 return msg
1307 1307
1308 1308 #--------------------------------------------------------------------------
1309 1309 # construct a View object
1310 1310 #--------------------------------------------------------------------------
1311 1311
1312 1312 def load_balanced_view(self, targets=None):
1313 1313 """construct a LoadBalancedView object.
1314 1314
1315 1315 If no arguments are specified, create a LoadBalancedView
1316 1316 using all engines.
1317 1317
1318 1318 Parameters
1319 1319 ----------
1320 1320
1321 1321 targets: list,slice,int,etc. [default: use all engines]
1322 1322 The subset of engines across which to load-balance
1323 1323 """
1324 1324 if targets == 'all':
1325 1325 targets = None
1326 1326 if targets is not None:
1327 1327 targets = self._build_targets(targets)[1]
1328 1328 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1329 1329
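# Example sketch: load-balanced task farming across all (or some) engines.
from IPython.parallel import Client

def square(x):
    return x ** 2

rc = Client()
lview = rc.load_balanced_view()            # balance over all engines
# lview = rc.load_balanced_view([0, 2])    # or over a subset
ar = lview.map_async(square, range(32))
print(ar.get())
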
1330 1330 def direct_view(self, targets='all'):
1331 1331 """construct a DirectView object.
1332 1332
1333 1333 If no targets are specified, create a DirectView using all engines.
1334 1334
1335 1335 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1336 1336 evaluate the target engines at each execution, whereas rc[:] will connect to
1337 1337 all *current* engines, and that list will not change.
1338 1338
1339 1339 That is, 'all' will always use all engines, whereas rc[:] will not use
1340 1340 engines added after the DirectView is constructed.
1341 1341
1342 1342 Parameters
1343 1343 ----------
1344 1344
1345 1345 targets: list,slice,int,etc. [default: use all engines]
1346 1346 The engines to use for the View
1347 1347 """
1348 1348 single = isinstance(targets, int)
1349 1349 # allow 'all' to be lazily evaluated at each execution
1350 1350 if targets != 'all':
1351 1351 targets = self._build_targets(targets)[1]
1352 1352 if single:
1353 1353 targets = targets[0]
1354 1354 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1355 1355
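# Example sketch: 'all' is re-evaluated at each execution, while rc[:] is
# pinned to the engines registered when the view was created.
from IPython.parallel import Client

rc = Client()
dv_all = rc.direct_view('all')   # will also use engines added later
dv_now = rc[:]                   # only the engines registered right now
dv_all.execute('a = 1', block=True)
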
1356 1356 #--------------------------------------------------------------------------
1357 1357 # Query methods
1358 1358 #--------------------------------------------------------------------------
1359 1359
1360 1360 @spin_first
1361 1361 def get_result(self, indices_or_msg_ids=None, block=None):
1362 1362 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1363 1363
1364 1364 If the client already has the results, no request to the Hub will be made.
1365 1365
1366 1366 This is a convenient way to construct AsyncResult objects, which are wrappers
1367 1367 that include metadata about execution, and allow for awaiting results that
1368 1368 were not submitted by this Client.
1369 1369
1370 1370 It can also be a convenient way to retrieve the metadata associated with
1371 1371 blocking execution, since it always retrieves
1372 1372 blocking execution, since it always retrieves the metadata as well.
1373 1373 Examples
1374 1374 --------
1375 1375 ::
1376 1376
1377 1377 In [10]: r = client.apply()
1378 1378
1379 1379 Parameters
1380 1380 ----------
1381 1381
1382 1382 indices_or_msg_ids : integer history index, str msg_id, or list of either
1383 1383 The indices or msg_ids of the results to be retrieved
1384 1384
1385 1385 block : bool
1386 1386 Whether to wait for the result to be done
1387 1387
1388 1388 Returns
1389 1389 -------
1390 1390
1391 1391 AsyncResult
1392 1392 A single AsyncResult object will always be returned.
1393 1393
1394 1394 AsyncHubResult
1395 1395 A subclass of AsyncResult that retrieves results from the Hub
1396 1396
1397 1397 """
1398 1398 block = self.block if block is None else block
1399 1399 if indices_or_msg_ids is None:
1400 1400 indices_or_msg_ids = -1
1401 1401
1402 1402 single_result = False
1403 1403 if not isinstance(indices_or_msg_ids, (list,tuple)):
1404 1404 indices_or_msg_ids = [indices_or_msg_ids]
1405 1405 single_result = True
1406 1406
1407 1407 theids = []
1408 1408 for id in indices_or_msg_ids:
1409 1409 if isinstance(id, int):
1410 1410 id = self.history[id]
1411 1411 if not isinstance(id, string_types):
1412 1412 raise TypeError("indices must be str or int, not %r"%id)
1413 1413 theids.append(id)
1414 1414
1415 1415 local_ids = [msg_id for msg_id in theids if (msg_id in self.outstanding or msg_id in self.results)]
1416 1416 remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]
1417 1417
1418 1418 # given a single msg_id initially, get_result should get the result itself,
1419 1419 # not a length-one list
1420 1420 if single_result:
1421 1421 theids = theids[0]
1422 1422
1423 1423 if remote_ids:
1424 1424 ar = AsyncHubResult(self, msg_ids=theids)
1425 1425 else:
1426 1426 ar = AsyncResult(self, msg_ids=theids)
1427 1427
1428 1428 if block:
1429 1429 ar.wait()
1430 1430
1431 1431 return ar
1432 1432
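# Example sketch: re-wrap a msg_id in an AsyncResult, e.g. to inspect the
# metadata of a task or fetch a result submitted earlier (or by another client).
import os
from IPython.parallel import Client

rc = Client()
ar = rc[:].apply_async(os.getpid)
msg_id = ar.msg_ids[0]
ar2 = rc.get_result(msg_id, block=True)   # AsyncResult (or AsyncHubResult)
print(ar2.get(), rc.metadata[msg_id]['completed'])
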
1433 1433 @spin_first
1434 1434 def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
1435 1435 """Resubmit one or more tasks.
1436 1436
1437 1437 in-flight tasks may not be resubmitted.
1438 1438
1439 1439 Parameters
1440 1440 ----------
1441 1441
1442 1442 indices_or_msg_ids : integer history index, str msg_id, or list of either
1443 1443 The indices or msg_ids of the tasks to be resubmitted
1444 1444
1445 1445 block : bool
1446 1446 Whether to wait for the result to be done
1447 1447
1448 1448 Returns
1449 1449 -------
1450 1450
1451 1451 AsyncHubResult
1452 1452 A subclass of AsyncResult that retrieves results from the Hub
1453 1453
1454 1454 """
1455 1455 block = self.block if block is None else block
1456 1456 if indices_or_msg_ids is None:
1457 1457 indices_or_msg_ids = -1
1458 1458
1459 1459 if not isinstance(indices_or_msg_ids, (list,tuple)):
1460 1460 indices_or_msg_ids = [indices_or_msg_ids]
1461 1461
1462 1462 theids = []
1463 1463 for id in indices_or_msg_ids:
1464 1464 if isinstance(id, int):
1465 1465 id = self.history[id]
1466 1466 if not isinstance(id, string_types):
1467 1467 raise TypeError("indices must be str or int, not %r"%id)
1468 1468 theids.append(id)
1469 1469
1470 1470 content = dict(msg_ids = theids)
1471 1471
1472 1472 self.session.send(self._query_socket, 'resubmit_request', content)
1473 1473
1474 1474 zmq.select([self._query_socket], [], [])
1475 1475 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1476 1476 if self.debug:
1477 1477 pprint(msg)
1478 1478 content = msg['content']
1479 1479 if content['status'] != 'ok':
1480 1480 raise self._unwrap_exception(content)
1481 1481 mapping = content['resubmitted']
1482 1482 new_ids = [ mapping[msg_id] for msg_id in theids ]
1483 1483
1484 1484 ar = AsyncHubResult(self, msg_ids=new_ids)
1485 1485
1486 1486 if block:
1487 1487 ar.wait()
1488 1488
1489 1489 return ar
1490 1490
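# Example sketch: resubmit a completed (not in-flight) task by msg_id; the
# Hub reruns it through the task scheduler and returns an AsyncHubResult.
from IPython.parallel import Client

rc = Client()
lview = rc.load_balanced_view()
ar = lview.apply_async(sum, [1, 2, 3])
ar.get()                                    # make sure it has finished
ar2 = rc.resubmit(ar.msg_ids[0], block=True)
print(ar2.get())
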
1491 1491 @spin_first
1492 1492 def result_status(self, msg_ids, status_only=True):
1493 1493 """Check on the status of the result(s) of the apply request with `msg_ids`.
1494 1494
1495 1495 If status_only is False, then the actual results will be retrieved, else
1496 1496 only the status of the results will be checked.
1497 1497
1498 1498 Parameters
1499 1499 ----------
1500 1500
1501 1501 msg_ids : list of msg_ids
1502 1502 if int:
1503 1503 Passed as index to self.history for convenience.
1504 1504 status_only : bool (default: True)
1505 1505 if False:
1506 1506 Retrieve the actual results of completed tasks.
1507 1507
1508 1508 Returns
1509 1509 -------
1510 1510
1511 1511 results : dict
1512 1512 There will always be the keys 'pending' and 'completed', which will
1513 1513 be lists of msg_ids that are incomplete or complete. If `status_only`
1514 1514 is False, then completed results will be keyed by their `msg_id`.
1515 1515 """
1516 1516 if not isinstance(msg_ids, (list,tuple)):
1517 1517 msg_ids = [msg_ids]
1518 1518
1519 1519 theids = []
1520 1520 for msg_id in msg_ids:
1521 1521 if isinstance(msg_id, int):
1522 1522 msg_id = self.history[msg_id]
1523 1523 if not isinstance(msg_id, string_types):
1524 1524 raise TypeError("msg_ids must be str, not %r"%msg_id)
1525 1525 theids.append(msg_id)
1526 1526
1527 1527 completed = []
1528 1528 local_results = {}
1529 1529
1530 1530 # comment this block out to temporarily disable local shortcut:
1531 1531 for msg_id in theids:
1532 1532 if msg_id in self.results:
1533 1533 completed.append(msg_id)
1534 1534 local_results[msg_id] = self.results[msg_id]
1535 1535 theids.remove(msg_id)
1536 1536
1537 1537 if theids: # some not locally cached
1538 1538 content = dict(msg_ids=theids, status_only=status_only)
1539 1539 msg = self.session.send(self._query_socket, "result_request", content=content)
1540 1540 zmq.select([self._query_socket], [], [])
1541 1541 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1542 1542 if self.debug:
1543 1543 pprint(msg)
1544 1544 content = msg['content']
1545 1545 if content['status'] != 'ok':
1546 1546 raise self._unwrap_exception(content)
1547 1547 buffers = msg['buffers']
1548 1548 else:
1549 1549 content = dict(completed=[],pending=[])
1550 1550
1551 1551 content['completed'].extend(completed)
1552 1552
1553 1553 if status_only:
1554 1554 return content
1555 1555
1556 1556 failures = []
1557 1557 # load cached results into result:
1558 1558 content.update(local_results)
1559 1559
1560 1560 # update cache with results:
1561 1561 for msg_id in sorted(theids):
1562 1562 if msg_id in content['completed']:
1563 1563 rec = content[msg_id]
1564 1564 parent = extract_dates(rec['header'])
1565 1565 header = extract_dates(rec['result_header'])
1566 1566 rcontent = rec['result_content']
1567 1567 iodict = rec['io']
1568 1568 if isinstance(rcontent, str):
1569 1569 rcontent = self.session.unpack(rcontent)
1570 1570
1571 1571 md = self.metadata[msg_id]
1572 1572 md_msg = dict(
1573 1573 content=rcontent,
1574 1574 parent_header=parent,
1575 1575 header=header,
1576 1576 metadata=rec['result_metadata'],
1577 1577 )
1578 1578 md.update(self._extract_metadata(md_msg))
1579 1579 if rec.get('received'):
1580 1580 md['received'] = parse_date(rec['received'])
1581 1581 md.update(iodict)
1582 1582
1583 1583 if rcontent['status'] == 'ok':
1584 1584 if header['msg_type'] == 'apply_reply':
1585 1585 res,buffers = serialize.unserialize_object(buffers)
1586 1586 elif header['msg_type'] == 'execute_reply':
1587 1587 res = ExecuteReply(msg_id, rcontent, md)
1588 1588 else:
1589 1589 raise KeyError("unhandled msg type: %r" % header['msg_type'])
1590 1590 else:
1591 1591 res = self._unwrap_exception(rcontent)
1592 1592 failures.append(res)
1593 1593
1594 1594 self.results[msg_id] = res
1595 1595 content[msg_id] = res
1596 1596
1597 1597 if len(theids) == 1 and failures:
1598 1598 raise failures[0]
1599 1599
1600 1600 error.collect_exceptions(failures, "result_status")
1601 1601 return content
1602 1602
1603 1603 @spin_first
1604 1604 def queue_status(self, targets='all', verbose=False):
1605 1605 """Fetch the status of engine queues.
1606 1606
1607 1607 Parameters
1608 1608 ----------
1609 1609
1610 1610 targets : int/str/list of ints/strs
1611 1611 the engines whose states are to be queried.
1612 1612 default : all
1613 1613 verbose : bool
1614 1614 Whether to return lengths only, or lists of ids for each element
1615 1615 """
1616 1616 if targets == 'all':
1617 1617 # allow 'all' to be lazily evaluated by the Hub
1618 1618 engine_ids = None
1619 1619 else:
1620 1620 engine_ids = self._build_targets(targets)[1]
1621 1621 content = dict(targets=engine_ids, verbose=verbose)
1622 1622 self.session.send(self._query_socket, "queue_request", content=content)
1623 1623 idents,msg = self.session.recv(self._query_socket, 0)
1624 1624 if self.debug:
1625 1625 pprint(msg)
1626 1626 content = msg['content']
1627 1627 status = content.pop('status')
1628 1628 if status != 'ok':
1629 1629 raise self._unwrap_exception(content)
1630 1630 content = rekey(content)
1631 1631 if isinstance(targets, int):
1632 1632 return content[targets]
1633 1633 else:
1634 1634 return content
1635 1635
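# Example sketch: inspect how much work each engine still has queued.
from IPython.parallel import Client

rc = Client()
status = rc.queue_status()      # dict keyed by engine id (queued/completed counts)
print(status)
print(rc.queue_status(targets=0, verbose=True))   # msg_id lists for engine 0
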
1636 1636 def _build_msgids_from_target(self, targets=None):
1637 1637 """Build a list of msg_ids from the list of engine targets"""
1638 1638 if not targets: # needed as _build_targets otherwise uses all engines
1639 1639 return []
1640 1640 target_ids = self._build_targets(targets)[0]
1641 1641 return [md_id for md_id in self.metadata if self.metadata[md_id]["engine_uuid"] in target_ids]
1642 1642
1643 1643 def _build_msgids_from_jobs(self, jobs=None):
1644 1644 """Build a list of msg_ids from "jobs" """
1645 1645 if not jobs:
1646 1646 return []
1647 1647 msg_ids = []
1648 1648 if isinstance(jobs, string_types + (AsyncResult,)):
1649 1649 jobs = [jobs]
1650 1650 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1651 1651 if bad_ids:
1652 1652 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1653 1653 for j in jobs:
1654 1654 if isinstance(j, AsyncResult):
1655 1655 msg_ids.extend(j.msg_ids)
1656 1656 else:
1657 1657 msg_ids.append(j)
1658 1658 return msg_ids
1659 1659
1660 1660 def purge_local_results(self, jobs=[], targets=[]):
1661 1661 """Clears the client caches of results and their metadata.
1662 1662
1663 1663 Individual results can be purged by msg_id, or the entire
1664 1664 history of specific targets can be purged.
1665 1665
1666 1666 Use `purge_local_results('all')` to scrub everything from the Client's
1667 1667 results and metadata caches.
1668 1668
1669 1669 After this call all `AsyncResults` are invalid and should be discarded.
1670 1670
1671 1671 If you must "reget" the results, you can still do so by using
1672 1672 `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
1673 1673 redownload the results from the hub if they are still available
1674 1674 (i.e. `client.purge_hub_results(...)` has not been called).
1675 1675
1676 1676 Parameters
1677 1677 ----------
1678 1678
1679 1679 jobs : str or list of str or AsyncResult objects
1680 1680 the msg_ids whose results should be purged.
1681 1681 targets : int/list of ints
1682 1682 The engines, by integer ID, whose entire result histories are to be purged.
1683 1683
1684 1684 Raises
1685 1685 ------
1686 1686
1687 1687 RuntimeError : if any of the tasks to be purged are still outstanding.
1688 1688
1689 1689 """
1690 1690 if not targets and not jobs:
1691 1691 raise ValueError("Must specify at least one of `targets` and `jobs`")
1692 1692
1693 1693 if jobs == 'all':
1694 1694 if self.outstanding:
1695 1695 raise RuntimeError("Can't purge outstanding tasks: %s" % self.outstanding)
1696 1696 self.results.clear()
1697 1697 self.metadata.clear()
1698 1698 else:
1699 1699 msg_ids = set()
1700 1700 msg_ids.update(self._build_msgids_from_target(targets))
1701 1701 msg_ids.update(self._build_msgids_from_jobs(jobs))
1702 1702 still_outstanding = self.outstanding.intersection(msg_ids)
1703 1703 if still_outstanding:
1704 1704 raise RuntimeError("Can't purge outstanding tasks: %s" % still_outstanding)
1705 1705 for mid in msg_ids:
1706 1706 self.results.pop(mid)
1707 1707 self.metadata.pop(mid)
1708 1708
1709 1709
1710 1710 @spin_first
1711 1711 def purge_hub_results(self, jobs=[], targets=[]):
1712 1712 """Tell the Hub to forget results.
1713 1713
1714 1714 Individual results can be purged by msg_id, or the entire
1715 1715 history of specific targets can be purged.
1716 1716
1717 1717 Use `purge_results('all')` to scrub everything from the Hub's db.
1718 1718
1719 1719 Parameters
1720 1720 ----------
1721 1721
1722 1722 jobs : str or list of str or AsyncResult objects
1723 1723 the msg_ids whose results should be forgotten.
1724 1724 targets : int/str/list of ints/strs
1725 1725 The targets, by int_id, whose entire history is to be purged.
1726 1726
1727 1727 default : None
1728 1728 """
1729 1729 if not targets and not jobs:
1730 1730 raise ValueError("Must specify at least one of `targets` and `jobs`")
1731 1731 if targets:
1732 1732 targets = self._build_targets(targets)[1]
1733 1733
1734 1734 # construct msg_ids from jobs
1735 1735 if jobs == 'all':
1736 1736 msg_ids = jobs
1737 1737 else:
1738 1738 msg_ids = self._build_msgids_from_jobs(jobs)
1739 1739
1740 1740 content = dict(engine_ids=targets, msg_ids=msg_ids)
1741 1741 self.session.send(self._query_socket, "purge_request", content=content)
1742 1742 idents, msg = self.session.recv(self._query_socket, 0)
1743 1743 if self.debug:
1744 1744 pprint(msg)
1745 1745 content = msg['content']
1746 1746 if content['status'] != 'ok':
1747 1747 raise self._unwrap_exception(content)
1748 1748
1749 1749 def purge_results(self, jobs=[], targets=[]):
1750 1750 """Clears the cached results from both the hub and the local client
1751 1751
1752 1752 Individual results can be purged by msg_id, or the entire
1753 1753 history of specific targets can be purged.
1754 1754
1755 1755 Use `purge_results('all')` to scrub every cached result from both the Hub's and
1756 1756 the Client's db.
1757 1757
1758 1758 Equivalent to calling both `purge_hub_results()` and `purge_client_results()` with
1759 1759 the same arguments.
1760 1760
1761 1761 Parameters
1762 1762 ----------
1763 1763
1764 1764 jobs : str or list of str or AsyncResult objects
1765 1765 the msg_ids whose results should be forgotten.
1766 1766 targets : int/str/list of ints/strs
1767 1767 The targets, by int_id, whose entire history is to be purged.
1768 1768
1769 1769 default : None
1770 1770 """
1771 1771 self.purge_local_results(jobs=jobs, targets=targets)
1772 1772 self.purge_hub_results(jobs=jobs, targets=targets)
1773 1773
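# Example sketch: drop cached results once they are no longer needed; nothing
# may still be outstanding when purging 'all'.
from IPython.parallel import Client

rc = Client()
rc.wait()                 # let outstanding work finish first
rc.purge_results('all')   # clear both the local caches and the Hub's db
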
1774 1774 def purge_everything(self):
1775 1775 """Clears all content from previous Tasks from both the hub and the local client
1776 1776
1777 1777 In addition to calling `purge_results("all")` it also deletes the history and
1778 1778 other bookkeeping lists.
1779 1779 """
1780 1780 self.purge_results("all")
1781 1781 self.history = []
1782 1782 self.session.digest_history.clear()
1783 1783
1784 1784 @spin_first
1785 1785 def hub_history(self):
1786 1786 """Get the Hub's history
1787 1787
1788 1788 Just like the Client, the Hub has a history, which is a list of msg_ids.
1789 1789 This will contain the history of all clients, and, depending on configuration,
1790 1790 may contain history across multiple cluster sessions.
1791 1791
1792 1792 Any msg_id returned here is a valid argument to `get_result`.
1793 1793
1794 1794 Returns
1795 1795 -------
1796 1796
1797 1797 msg_ids : list of strs
1798 1798 list of all msg_ids, ordered by task submission time.
1799 1799 """
1800 1800
1801 1801 self.session.send(self._query_socket, "history_request", content={})
1802 1802 idents, msg = self.session.recv(self._query_socket, 0)
1803 1803
1804 1804 if self.debug:
1805 1805 pprint(msg)
1806 1806 content = msg['content']
1807 1807 if content['status'] != 'ok':
1808 1808 raise self._unwrap_exception(content)
1809 1809 else:
1810 1810 return content['history']
1811 1811
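# Example sketch: any msg_id the Hub remembers can be handed to get_result,
# even if it was submitted by another client or in an earlier session.
from IPython.parallel import Client

rc = Client()
hist = rc.hub_history()
if hist:
    ar = rc.get_result(hist[-1])
    print(ar.get())
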
1812 1812 @spin_first
1813 1813 def db_query(self, query, keys=None):
1814 1814 """Query the Hub's TaskRecord database
1815 1815
1816 1816 This will return a list of task record dicts that match `query`
1817 1817
1818 1818 Parameters
1819 1819 ----------
1820 1820
1821 1821 query : mongodb query dict
1822 1822 The search dict. See mongodb query docs for details.
1823 1823 keys : list of strs [optional]
1824 1824 The subset of keys to be returned. The default is to fetch everything but buffers.
1825 1825 'msg_id' will *always* be included.
1826 1826 """
1827 1827 if isinstance(keys, string_types):
1828 1828 keys = [keys]
1829 1829 content = dict(query=query, keys=keys)
1830 1830 self.session.send(self._query_socket, "db_request", content=content)
1831 1831 idents, msg = self.session.recv(self._query_socket, 0)
1832 1832 if self.debug:
1833 1833 pprint(msg)
1834 1834 content = msg['content']
1835 1835 if content['status'] != 'ok':
1836 1836 raise self._unwrap_exception(content)
1837 1837
1838 1838 records = content['records']
1839 1839
1840 1840 buffer_lens = content['buffer_lens']
1841 1841 result_buffer_lens = content['result_buffer_lens']
1842 1842 buffers = msg['buffers']
1843 1843 has_bufs = buffer_lens is not None
1844 1844 has_rbufs = result_buffer_lens is not None
1845 1845 for i,rec in enumerate(records):
1846 1846 # unpack datetime objects
1847 1847 for hkey in ('header', 'result_header'):
1848 1848 if hkey in rec:
1849 1849 rec[hkey] = extract_dates(rec[hkey])
1850 1850 for dtkey in ('submitted', 'started', 'completed', 'received'):
1851 1851 if dtkey in rec:
1852 1852 rec[dtkey] = parse_date(rec[dtkey])
1853 1853 # relink buffers
1854 1854 if has_bufs:
1855 1855 blen = buffer_lens[i]
1856 1856 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1857 1857 if has_rbufs:
1858 1858 blen = result_buffer_lens[i]
1859 1859 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1860 1860
1861 1861 return records
1862 1862
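# Example sketch: query the Hub's TaskRecord database with a mongodb-style
# filter; `keys` limits the returned fields ('msg_id' is always included).
from IPython.parallel import Client

rc = Client()
recs = rc.db_query({'completed': {'$ne': None}},
                   keys=['msg_id', 'engine_uuid', 'completed'])
for rec in recs[:5]:
    print(rec['msg_id'], rec['completed'])
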
1863 1863 __all__ = [ 'Client' ]