remove `source` key from display_data
MinRK
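The diffs below drop the unused `source` argument from the display publishing path and make `data`/`metadata` keyword arguments, so no `source` field is sent in `display_data` messages. A minimal before/after sketch of the call-site change, assuming an interactive IPython session:

```python
from IPython.core.displaypub import publish_display_data

# old call style (removed by this commit): the leading positional
# argument was an unused `source` string
# publish_display_data('IPython.core.display', {'text/plain': 'hello'})

# new call style: data and metadata are keyword arguments, and no
# `source` key appears in the resulting display_data message
publish_display_data(
    data={'text/plain': 'hello', 'text/html': '<b>hello</b>'},
    metadata={},
)
```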
@@ -1,812 +1,812 b''
1 1 # -*- coding: utf-8 -*-
2 2 """Top-level display functions for displaying object in different formats.
3 3
4 4 Authors:
5 5
6 6 * Brian Granger
7 7 """
8 8
9 9 #-----------------------------------------------------------------------------
10 10 # Copyright (C) 2013 The IPython Development Team
11 11 #
12 12 # Distributed under the terms of the BSD License. The full license is in
13 13 # the file COPYING, distributed as part of this software.
14 14 #-----------------------------------------------------------------------------
15 15
16 16 #-----------------------------------------------------------------------------
17 17 # Imports
18 18 #-----------------------------------------------------------------------------
19 19
20 20 from __future__ import print_function
21 21
22 22 import os
23 23 import struct
24 24
25 25 from IPython.core.formatters import _safe_get_formatter_method
26 26 from IPython.utils.py3compat import (string_types, cast_bytes_py2, cast_unicode,
27 27 unicode_type)
28 28 from IPython.testing.skipdoctest import skip_doctest
29 29 from .displaypub import publish_display_data
30 30
31 31 #-----------------------------------------------------------------------------
32 32 # utility functions
33 33 #-----------------------------------------------------------------------------
34 34
35 35 def _safe_exists(path):
36 36 """Check path, but don't let exceptions raise"""
37 37 try:
38 38 return os.path.exists(path)
39 39 except Exception:
40 40 return False
41 41
42 42 def _merge(d1, d2):
43 43 """Like update, but merges sub-dicts instead of clobbering at the top level.
44 44
45 45 Updates d1 in-place
46 46 """
47 47
48 48 if not isinstance(d2, dict) or not isinstance(d1, dict):
49 49 return d2
50 50 for key, value in d2.items():
51 51 d1[key] = _merge(d1.get(key), value)
52 52 return d1
53 53
54 54 def _display_mimetype(mimetype, objs, raw=False, metadata=None):
55 55 """internal implementation of all display_foo methods
56 56
57 57 Parameters
58 58 ----------
59 59 mimetype : str
60 60 The mimetype to be published (e.g. 'image/png')
61 61 objs : tuple of objects
62 62 The Python objects to display, or if raw=True raw text data to
63 63 display.
64 64 raw : bool
65 65 Are the data objects raw data or Python objects that need to be
66 66 formatted before display? [default: False]
67 67 metadata : dict (optional)
68 68 Metadata to be associated with the specific mimetype output.
69 69 """
70 70 if metadata:
71 71 metadata = {mimetype: metadata}
72 72 if raw:
73 73 # turn list of pngdata into list of { 'image/png': pngdata }
74 74 objs = [ {mimetype: obj} for obj in objs ]
75 75 display(*objs, raw=raw, metadata=metadata, include=[mimetype])
76 76
77 77 #-----------------------------------------------------------------------------
78 78 # Main functions
79 79 #-----------------------------------------------------------------------------
80 80
81 81 def display(*objs, **kwargs):
82 82 """Display a Python object in all frontends.
83 83
84 84 By default all representations will be computed and sent to the frontends.
85 85 Frontends can decide which representation is used and how.
86 86
87 87 Parameters
88 88 ----------
89 89 objs : tuple of objects
90 90 The Python objects to display.
91 91 raw : bool, optional
92 92 Are the objects to be displayed already mimetype-keyed dicts of raw display data,
93 93 or Python objects that need to be formatted before display? [default: False]
94 94 include : list or tuple, optional
95 95 A list of format type strings (MIME types) to include in the
96 96 format data dict. If this is set *only* the format types included
97 97 in this list will be computed.
98 98 exclude : list or tuple, optional
99 99 A list of format type strings (MIME types) to exclude in the format
100 100 data dict. If this is set all format types will be computed,
101 101 except for those included in this argument.
102 102 metadata : dict, optional
103 103 A dictionary of metadata to associate with the output.
104 104 mime-type keys in this dictionary will be associated with the individual
105 105 representation formats, if they exist.
106 106 """
107 107 raw = kwargs.get('raw', False)
108 108 include = kwargs.get('include')
109 109 exclude = kwargs.get('exclude')
110 110 metadata = kwargs.get('metadata')
111 111
112 112 from IPython.core.interactiveshell import InteractiveShell
113 113
114 114 if not raw:
115 115 format = InteractiveShell.instance().display_formatter.format
116 116
117 117 for obj in objs:
118 118
119 119 # If _ipython_display_ is defined, use that to display this object.
120 120 display_method = _safe_get_formatter_method(obj, '_ipython_display_')
121 121 if display_method is not None:
122 122 try:
123 123 display_method(**kwargs)
124 124 except NotImplementedError:
125 125 pass
126 126 else:
127 127 continue
128 128 if raw:
129 publish_display_data('display', obj, metadata)
129 publish_display_data(data=obj, metadata=metadata)
130 130 else:
131 131 format_dict, md_dict = format(obj, include=include, exclude=exclude)
132 132 if metadata:
133 133 # kwarg-specified metadata gets precedence
134 134 _merge(md_dict, metadata)
135 publish_display_data('display', format_dict, md_dict)
135 publish_display_data(data=format_dict, metadata=md_dict)
136 136
137 137
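A small sketch of both paths through `display()` described in the docstring above; the HTML string is purely illustrative:

```python
from IPython.display import display

# formatted path: the display formatter computes all available reprs
display({'a': 1}, [1, 2, 3])

# raw path: each object is already a mimetype-keyed dict and is
# published as-is via publish_display_data(data=..., metadata=...)
display({'text/plain': 'hi', 'text/html': '<b>hi</b>'}, raw=True)
```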
138 138 def display_pretty(*objs, **kwargs):
139 139 """Display the pretty (default) representation of an object.
140 140
141 141 Parameters
142 142 ----------
143 143 objs : tuple of objects
144 144 The Python objects to display, or if raw=True raw text data to
145 145 display.
146 146 raw : bool
147 147 Are the data objects raw data or Python objects that need to be
148 148 formatted before display? [default: False]
149 149 metadata : dict (optional)
150 150 Metadata to be associated with the specific mimetype output.
151 151 """
152 152 _display_mimetype('text/plain', objs, **kwargs)
153 153
154 154
155 155 def display_html(*objs, **kwargs):
156 156 """Display the HTML representation of an object.
157 157
158 158 Parameters
159 159 ----------
160 160 objs : tuple of objects
161 161 The Python objects to display, or if raw=True raw HTML data to
162 162 display.
163 163 raw : bool
164 164 Are the data objects raw data or Python objects that need to be
165 165 formatted before display? [default: False]
166 166 metadata : dict (optional)
167 167 Metadata to be associated with the specific mimetype output.
168 168 """
169 169 _display_mimetype('text/html', objs, **kwargs)
170 170
171 171
172 172 def display_markdown(*objs, **kwargs):
173 173 """Displays the Markdown representation of an object.
174 174
175 175 Parameters
176 176 ----------
177 177 objs : tuple of objects
178 178 The Python objects to display, or if raw=True raw markdown data to
179 179 display.
180 180 raw : bool
181 181 Are the data objects raw data or Python objects that need to be
182 182 formatted before display? [default: False]
183 183 metadata : dict (optional)
184 184 Metadata to be associated with the specific mimetype output.
185 185 """
186 186
187 187 _display_mimetype('text/markdown', objs, **kwargs)
188 188
189 189
190 190 def display_svg(*objs, **kwargs):
191 191 """Display the SVG representation of an object.
192 192
193 193 Parameters
194 194 ----------
195 195 objs : tuple of objects
196 196 The Python objects to display, or if raw=True raw svg data to
197 197 display.
198 198 raw : bool
199 199 Are the data objects raw data or Python objects that need to be
200 200 formatted before display? [default: False]
201 201 metadata : dict (optional)
202 202 Metadata to be associated with the specific mimetype output.
203 203 """
204 204 _display_mimetype('image/svg+xml', objs, **kwargs)
205 205
206 206
207 207 def display_png(*objs, **kwargs):
208 208 """Display the PNG representation of an object.
209 209
210 210 Parameters
211 211 ----------
212 212 objs : tuple of objects
213 213 The Python objects to display, or if raw=True raw png data to
214 214 display.
215 215 raw : bool
216 216 Are the data objects raw data or Python objects that need to be
217 217 formatted before display? [default: False]
218 218 metadata : dict (optional)
219 219 Metadata to be associated with the specific mimetype output.
220 220 """
221 221 _display_mimetype('image/png', objs, **kwargs)
222 222
223 223
224 224 def display_jpeg(*objs, **kwargs):
225 225 """Display the JPEG representation of an object.
226 226
227 227 Parameters
228 228 ----------
229 229 objs : tuple of objects
230 230 The Python objects to display, or if raw=True raw JPEG data to
231 231 display.
232 232 raw : bool
233 233 Are the data objects raw data or Python objects that need to be
234 234 formatted before display? [default: False]
235 235 metadata : dict (optional)
236 236 Metadata to be associated with the specific mimetype output.
237 237 """
238 238 _display_mimetype('image/jpeg', objs, **kwargs)
239 239
240 240
241 241 def display_latex(*objs, **kwargs):
242 242 """Display the LaTeX representation of an object.
243 243
244 244 Parameters
245 245 ----------
246 246 objs : tuple of objects
247 247 The Python objects to display, or if raw=True raw latex data to
248 248 display.
249 249 raw : bool
250 250 Are the data objects raw data or Python objects that need to be
251 251 formatted before display? [default: False]
252 252 metadata : dict (optional)
253 253 Metadata to be associated with the specific mimetype output.
254 254 """
255 255 _display_mimetype('text/latex', objs, **kwargs)
256 256
257 257
258 258 def display_json(*objs, **kwargs):
259 259 """Display the JSON representation of an object.
260 260
261 261 Note that not many frontends support displaying JSON.
262 262
263 263 Parameters
264 264 ----------
265 265 objs : tuple of objects
266 266 The Python objects to display, or if raw=True raw json data to
267 267 display.
268 268 raw : bool
269 269 Are the data objects raw data or Python objects that need to be
270 270 formatted before display? [default: False]
271 271 metadata : dict (optional)
272 272 Metadata to be associated with the specific mimetype output.
273 273 """
274 274 _display_mimetype('application/json', objs, **kwargs)
275 275
276 276
277 277 def display_javascript(*objs, **kwargs):
278 278 """Display the Javascript representation of an object.
279 279
280 280 Parameters
281 281 ----------
282 282 objs : tuple of objects
283 283 The Python objects to display, or if raw=True raw javascript data to
284 284 display.
285 285 raw : bool
286 286 Are the data objects raw data or Python objects that need to be
287 287 formatted before display? [default: False]
288 288 metadata : dict (optional)
289 289 Metadata to be associated with the specific mimetype output.
290 290 """
291 291 _display_mimetype('application/javascript', objs, **kwargs)
292 292
293 293
294 294 def display_pdf(*objs, **kwargs):
295 295 """Display the PDF representation of an object.
296 296
297 297 Parameters
298 298 ----------
299 299 objs : tuple of objects
300 300 The Python objects to display, or if raw=True raw javascript data to
301 301 display.
302 302 raw : bool
303 303 Are the data objects raw data or Python objects that need to be
304 304 formatted before display? [default: False]
305 305 metadata : dict (optional)
306 306 Metadata to be associated with the specific mimetype output.
307 307 """
308 308 _display_mimetype('application/pdf', objs, **kwargs)
309 309
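Each of the `display_*` helpers above is a thin wrapper around `_display_mimetype` for a single MIME type. A hedged example using the HTML helper (the markup is illustrative):

```python
from IPython.display import display_html

# raw=True sends the string as 'text/html' data directly, without
# asking the formatter for an HTML repr of a Python object
display_html('<h3>Totals</h3><p>42 items</p>', raw=True)
```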
310 310
311 311 #-----------------------------------------------------------------------------
312 312 # Smart classes
313 313 #-----------------------------------------------------------------------------
314 314
315 315
316 316 class DisplayObject(object):
317 317 """An object that wraps data to be displayed."""
318 318
319 319 _read_flags = 'r'
320 320 _show_mem_addr = False
321 321
322 322 def __init__(self, data=None, url=None, filename=None):
323 323 """Create a display object given raw data.
324 324
325 325 When this object is returned by an expression or passed to the
326 326 display function, it will result in the data being displayed
327 327 in the frontend. The MIME type of the data should match the
328 328 subclasses used, so the Png subclass should be used for 'image/png'
329 329 data. If the data is a URL, the data will first be downloaded
330 330 and then displayed.
331 331
332 332 Parameters
333 333 ----------
334 334 data : unicode, str or bytes
335 335 The raw data or a URL or file to load the data from
336 336 url : unicode
337 337 A URL to download the data from.
338 338 filename : unicode
339 339 Path to a local file to load the data from.
340 340 """
341 341 if data is not None and isinstance(data, string_types):
342 342 if data.startswith('http') and url is None:
343 343 url = data
344 344 filename = None
345 345 data = None
346 346 elif _safe_exists(data) and filename is None:
347 347 url = None
348 348 filename = data
349 349 data = None
350 350
351 351 self.data = data
352 352 self.url = url
353 353 self.filename = None if filename is None else unicode_type(filename)
354 354
355 355 self.reload()
356 356 self._check_data()
357 357
358 358 def __repr__(self):
359 359 if not self._show_mem_addr:
360 360 cls = self.__class__
361 361 r = "<%s.%s object>" % (cls.__module__, cls.__name__)
362 362 else:
363 363 r = super(DisplayObject, self).__repr__()
364 364 return r
365 365
366 366 def _check_data(self):
367 367 """Override in subclasses if there's something to check."""
368 368 pass
369 369
370 370 def reload(self):
371 371 """Reload the raw data from file or URL."""
372 372 if self.filename is not None:
373 373 with open(self.filename, self._read_flags) as f:
374 374 self.data = f.read()
375 375 elif self.url is not None:
376 376 try:
377 377 try:
378 378 from urllib.request import urlopen # Py3
379 379 except ImportError:
380 380 from urllib2 import urlopen
381 381 response = urlopen(self.url)
382 382 self.data = response.read()
383 383 # extract encoding from header, if there is one:
384 384 encoding = None
385 385 for sub in response.headers['content-type'].split(';'):
386 386 sub = sub.strip()
387 387 if sub.startswith('charset'):
388 388 encoding = sub.split('=')[-1].strip()
389 389 break
390 390 # decode data, if an encoding was specified
391 391 if encoding:
392 392 self.data = self.data.decode(encoding, 'replace')
393 393 except:
394 394 self.data = None
395 395
396 396 class TextDisplayObject(DisplayObject):
397 397 """Validate that display data is text"""
398 398 def _check_data(self):
399 399 if self.data is not None and not isinstance(self.data, string_types):
400 400 raise TypeError("%s expects text, not %r" % (self.__class__.__name__, self.data))
401 401
402 402 class Pretty(TextDisplayObject):
403 403
404 404 def _repr_pretty_(self):
405 405 return self.data
406 406
407 407
408 408 class HTML(TextDisplayObject):
409 409
410 410 def _repr_html_(self):
411 411 return self.data
412 412
413 413 def __html__(self):
414 414 """
415 415 This method exists to inform other HTML-using modules (e.g. Markupsafe,
416 416 htmltag, etc) that this object is HTML and does not need things like
417 417 special characters (<>&) escaped.
418 418 """
419 419 return self._repr_html_()
420 420
421 421
422 422 class Markdown(TextDisplayObject):
423 423
424 424 def _repr_markdown_(self):
425 425 return self.data
426 426
427 427
428 428 class Math(TextDisplayObject):
429 429
430 430 def _repr_latex_(self):
431 431 s = self.data.strip('$')
432 432 return "$$%s$$" % s
433 433
434 434
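A quick sketch of the two LaTeX-oriented classes above: `Math` strips any surrounding `$` and re-wraps the source in `$$...$$` for display mode, while `Latex` passes the source through unchanged.

```python
from IPython.display import Math, Latex

# rendered as $$\int_0^1 x^2\,dx = \frac{1}{3}$$
Math(r'\int_0^1 x^2\,dx = \frac{1}{3}')

# raw LaTeX, left as-is, so full environments work
Latex(r'\begin{eqnarray} a &=& b + c \end{eqnarray}')
```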
435 435 class Latex(TextDisplayObject):
436 436
437 437 def _repr_latex_(self):
438 438 return self.data
439 439
440 440
441 441 class SVG(DisplayObject):
442 442
443 443 # wrap data in a property, which extracts the <svg> tag, discarding
444 444 # document headers
445 445 _data = None
446 446
447 447 @property
448 448 def data(self):
449 449 return self._data
450 450
451 451 @data.setter
452 452 def data(self, svg):
453 453 if svg is None:
454 454 self._data = None
455 455 return
456 456 # parse into dom object
457 457 from xml.dom import minidom
458 458 svg = cast_bytes_py2(svg)
459 459 x = minidom.parseString(svg)
460 460 # get svg tag (should be 1)
461 461 found_svg = x.getElementsByTagName('svg')
462 462 if found_svg:
463 463 svg = found_svg[0].toxml()
464 464 else:
465 465 # fallback on the input, trust the user
466 466 # but this is probably an error.
467 467 pass
468 468 svg = cast_unicode(svg)
469 469 self._data = svg
470 470
471 471 def _repr_svg_(self):
472 472 return self.data
473 473
474 474
475 475 class JSON(TextDisplayObject):
476 476
477 477 def _repr_json_(self):
478 478 return self.data
479 479
480 480 css_t = """$("head").append($("<link/>").attr({
481 481 rel: "stylesheet",
482 482 type: "text/css",
483 483 href: "%s"
484 484 }));
485 485 """
486 486
487 487 lib_t1 = """$.getScript("%s", function () {
488 488 """
489 489 lib_t2 = """});
490 490 """
491 491
492 492 class Javascript(TextDisplayObject):
493 493
494 494 def __init__(self, data=None, url=None, filename=None, lib=None, css=None):
495 495 """Create a Javascript display object given raw data.
496 496
497 497 When this object is returned by an expression or passed to the
498 498 display function, it will result in the data being displayed
499 499 in the frontend. If the data is a URL, the data will first be
500 500 downloaded and then displayed.
501 501
502 502 In the Notebook, the containing element will be available as `element`,
503 503 and jQuery will be available. The output area starts hidden, so if
504 504 the js appends content to `element` that should be visible, then
505 505 it must call `container.show()` to unhide the area.
506 506
507 507 Parameters
508 508 ----------
509 509 data : unicode, str or bytes
510 510 The Javascript source code or a URL to download it from.
511 511 url : unicode
512 512 A URL to download the data from.
513 513 filename : unicode
514 514 Path to a local file to load the data from.
515 515 lib : list or str
516 516 A sequence of Javascript library URLs to load asynchronously before
517 517 running the source code. The full URLs of the libraries should
518 518 be given. A single Javascript library URL can also be given as a
519 519 string.
520 520 css : list or str
521 521 A sequence of css files to load before running the source code.
522 522 The full URLs of the css files should be given. A single css URL
523 523 can also be given as a string.
524 524 """
525 525 if isinstance(lib, string_types):
526 526 lib = [lib]
527 527 elif lib is None:
528 528 lib = []
529 529 if isinstance(css, string_types):
530 530 css = [css]
531 531 elif css is None:
532 532 css = []
533 533 if not isinstance(lib, (list,tuple)):
534 534 raise TypeError('expected sequence, got: %r' % lib)
535 535 if not isinstance(css, (list,tuple)):
536 536 raise TypeError('expected sequence, got: %r' % css)
537 537 self.lib = lib
538 538 self.css = css
539 539 super(Javascript, self).__init__(data=data, url=url, filename=filename)
540 540
541 541 def _repr_javascript_(self):
542 542 r = ''
543 543 for c in self.css:
544 544 r += css_t % c
545 545 for l in self.lib:
546 546 r += lib_t1 % l
547 547 r += self.data
548 548 r += lib_t2*len(self.lib)
549 549 return r
550 550
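Per the docstring, `Javascript` can load library and CSS URLs before running the source, and `_repr_javascript_` wraps the code in the `$.getScript(...)` callbacks defined above. A hedged sketch; the URLs are placeholders, not real assets:

```python
from IPython.display import Javascript, display

js = Javascript(
    "element.append('loaded');",               # `element` is provided by the notebook
    lib="https://example.com/some-lib.js",     # loaded via $.getScript before the code runs
    css="https://example.com/some-style.css",  # appended as a <link> tag first
)
display(js)
```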
551 551 # constants for identifying png/jpeg data
552 552 _PNG = b'\x89PNG\r\n\x1a\n'
553 553 _JPEG = b'\xff\xd8'
554 554
555 555 def _pngxy(data):
556 556 """read the (width, height) from a PNG header"""
557 557 ihdr = data.index(b'IHDR')
558 558 # next 8 bytes are width/height
559 559 w4h4 = data[ihdr+4:ihdr+12]
560 560 return struct.unpack('>ii', w4h4)
561 561
562 562 def _jpegxy(data):
563 563 """read the (width, height) from a JPEG header"""
564 564 # adapted from http://www.64lines.com/jpeg-width-height
565 565
566 566 idx = 4
567 567 while True:
568 568 block_size = struct.unpack('>H', data[idx:idx+2])[0]
569 569 idx = idx + block_size
570 570 if data[idx:idx+2] == b'\xFF\xC0':
571 571 # found Start of Frame
572 572 iSOF = idx
573 573 break
574 574 else:
575 575 # read another block
576 576 idx += 2
577 577
578 578 h, w = struct.unpack('>HH', data[iSOF+5:iSOF+9])
579 579 return w, h
580 580
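These helpers read image dimensions straight from the binary headers; `Image` uses them for the `retina` option. A minimal sketch using the module-private names defined just above, assuming a hypothetical local file `logo.png`:

```python
with open('logo.png', 'rb') as f:   # hypothetical path
    data = f.read()

if data.startswith(_PNG):
    print(_pngxy(data))    # (width, height) read from the IHDR chunk
elif data.startswith(_JPEG):
    print(_jpegxy(data))   # (width, height) read from the SOF block
```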
581 581 class Image(DisplayObject):
582 582
583 583 _read_flags = 'rb'
584 584 _FMT_JPEG = u'jpeg'
585 585 _FMT_PNG = u'png'
586 586 _ACCEPTABLE_EMBEDDINGS = [_FMT_JPEG, _FMT_PNG]
587 587
588 588 def __init__(self, data=None, url=None, filename=None, format=u'png', embed=None, width=None, height=None, retina=False):
589 589 """Create a PNG/JPEG image object given raw data.
590 590
591 591 When this object is returned by an input cell or passed to the
592 592 display function, it will result in the image being displayed
593 593 in the frontend.
594 594
595 595 Parameters
596 596 ----------
597 597 data : unicode, str or bytes
598 598 The raw image data or a URL or filename to load the data from.
599 599 This always results in embedded image data.
600 600 url : unicode
601 601 A URL to download the data from. If you specify `url=`,
602 602 the image data will not be embedded unless you also specify `embed=True`.
603 603 filename : unicode
604 604 Path to a local file to load the data from.
605 605 Images from a file are always embedded.
606 606 format : unicode
607 607 The format of the image data (png/jpeg/jpg). If a filename or URL is given
608 608 the format will be inferred from the filename extension.
609 609 embed : bool
610 610 Should the image data be embedded using a data URI (True) or be
611 611 loaded using an <img> tag. Set this to True if you want the image
612 612 to be viewable later with no internet connection in the notebook.
613 613
614 614 Default is `True`, unless the keyword argument `url` is set, in which
615 615 case the default is `False`.
616 616
617 617 Note that QtConsole is not able to display images if `embed` is set to `False`
618 618 width : int
619 619 Width to which to constrain the image in html
620 620 height : int
621 621 Height to which to constrain the image in html
622 622 retina : bool
623 623 Automatically set the width and height to half of the measured
624 624 width and height.
625 625 This only works for embedded images because it reads the width/height
626 626 from image data.
627 627 For non-embedded images, you can just set the desired display width
628 628 and height directly.
629 629
630 630 Examples
631 631 --------
632 632 # embedded image data, works in qtconsole and notebook
633 633 # when passed positionally, the first arg can be any of raw image data,
634 634 # a URL, or a filename from which to load image data.
635 635 # The result is always embedding image data for inline images.
636 636 Image('http://www.google.fr/images/srpr/logo3w.png')
637 637 Image('/path/to/image.jpg')
638 638 Image(b'RAW_PNG_DATA...')
639 639
640 640 # Specifying Image(url=...) does not embed the image data,
641 641 # it only generates `<img>` tag with a link to the source.
642 642 # This will not work in the qtconsole or offline.
643 643 Image(url='http://www.google.fr/images/srpr/logo3w.png')
644 644
645 645 """
646 646 if filename is not None:
647 647 ext = self._find_ext(filename)
648 648 elif url is not None:
649 649 ext = self._find_ext(url)
650 650 elif data is None:
651 651 raise ValueError("No image data found. Expecting filename, url, or data.")
652 652 elif isinstance(data, string_types) and (
653 653 data.startswith('http') or _safe_exists(data)
654 654 ):
655 655 ext = self._find_ext(data)
656 656 else:
657 657 ext = None
658 658
659 659 if ext is not None:
660 660 format = ext.lower()
661 661 if ext == u'jpg' or ext == u'jpeg':
662 662 format = self._FMT_JPEG
663 663 if ext == u'png':
664 664 format = self._FMT_PNG
665 665 elif isinstance(data, bytes) and format == 'png':
666 666 # infer image type from image data header,
667 667 # only if format might not have been specified.
668 668 if data[:2] == _JPEG:
669 669 format = 'jpeg'
670 670
671 671 self.format = unicode_type(format).lower()
672 672 self.embed = embed if embed is not None else (url is None)
673 673
674 674 if self.embed and self.format not in self._ACCEPTABLE_EMBEDDINGS:
675 675 raise ValueError("Cannot embed the '%s' image format" % (self.format))
676 676 self.width = width
677 677 self.height = height
678 678 self.retina = retina
679 679 super(Image, self).__init__(data=data, url=url, filename=filename)
680 680
681 681 if retina:
682 682 self._retina_shape()
683 683
684 684 def _retina_shape(self):
685 685 """load pixel-doubled width and height from image data"""
686 686 if not self.embed:
687 687 return
688 688 if self.format == 'png':
689 689 w, h = _pngxy(self.data)
690 690 elif self.format == 'jpeg':
691 691 w, h = _jpegxy(self.data)
692 692 else:
693 693 # retina only supports png and jpeg
694 694 return
695 695 self.width = w // 2
696 696 self.height = h // 2
697 697
698 698 def reload(self):
699 699 """Reload the raw data from file or URL."""
700 700 if self.embed:
701 701 super(Image,self).reload()
702 702 if self.retina:
703 703 self._retina_shape()
704 704
705 705 def _repr_html_(self):
706 706 if not self.embed:
707 707 width = height = ''
708 708 if self.width:
709 709 width = ' width="%d"' % self.width
710 710 if self.height:
711 711 height = ' height="%d"' % self.height
712 712 return u'<img src="%s"%s%s/>' % (self.url, width, height)
713 713
714 714 def _data_and_metadata(self):
715 715 """shortcut for returning metadata with shape information, if defined"""
716 716 md = {}
717 717 if self.width:
718 718 md['width'] = self.width
719 719 if self.height:
720 720 md['height'] = self.height
721 721 if md:
722 722 return self.data, md
723 723 else:
724 724 return self.data
725 725
726 726 def _repr_png_(self):
727 727 if self.embed and self.format == u'png':
728 728 return self._data_and_metadata()
729 729
730 730 def _repr_jpeg_(self):
731 731 if self.embed and (self.format == u'jpeg' or self.format == u'jpg'):
732 732 return self._data_and_metadata()
733 733
734 734 def _find_ext(self, s):
735 735 return unicode_type(s.split('.')[-1].lower())
736 736
737 737
738 738 def clear_output(wait=False):
739 739 """Clear the output of the current cell receiving output.
740 740
741 741 Parameters
742 742 ----------
743 743 wait : bool [default: false]
744 744 Wait to clear the output until new output is available to replace it."""
745 745 from IPython.core.interactiveshell import InteractiveShell
746 746 if InteractiveShell.initialized():
747 747 InteractiveShell.instance().display_pub.clear_output(wait)
748 748 else:
749 749 from IPython.utils import io
750 750 print('\033[2K\r', file=io.stdout, end='')
751 751 io.stdout.flush()
752 752 print('\033[2K\r', file=io.stderr, end='')
753 753 io.stderr.flush()
754 754
755 755
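A common use of `clear_output` is refreshing output in place inside a loop; `wait=True` defers the clear until new output arrives, which avoids flicker. A small sketch:

```python
import time
from IPython.display import clear_output

for i in range(5):
    clear_output(wait=True)   # clear just before the next print lands
    print('step', i)
    time.sleep(0.5)
```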
756 756 @skip_doctest
757 757 def set_matplotlib_formats(*formats, **kwargs):
758 758 """Select figure formats for the inline backend. Optionally pass quality for JPEG.
759 759
760 760 For example, this enables PNG and JPEG output with a JPEG quality of 90%::
761 761
762 762 In [1]: set_matplotlib_formats('png', 'jpeg', quality=90)
763 763
764 764 To set this in your config files use the following::
765 765
766 766 c.InlineBackend.figure_formats = {'png', 'jpeg'}
767 767 c.InlineBackend.print_figure_kwargs.update({'quality' : 90})
768 768
769 769 Parameters
770 770 ----------
771 771 *formats : strs
772 772 One or more figure formats to enable: 'png', 'retina', 'jpeg', 'svg', 'pdf'.
773 773 **kwargs :
774 774 Keyword args will be relayed to ``figure.canvas.print_figure``.
775 775 """
776 776 from IPython.core.interactiveshell import InteractiveShell
777 777 from IPython.core.pylabtools import select_figure_formats
778 778 from IPython.kernel.zmq.pylab.config import InlineBackend
779 779 # build kwargs, starting with InlineBackend config
780 780 kw = {}
781 781 cfg = InlineBackend.instance()
782 782 kw.update(cfg.print_figure_kwargs)
783 783 kw.update(**kwargs)
784 784 shell = InteractiveShell.instance()
785 785 select_figure_formats(shell, formats, **kw)
786 786
787 787 @skip_doctest
788 788 def set_matplotlib_close(close=True):
789 789 """Set whether the inline backend closes all figures automatically or not.
790 790
791 791 By default, the inline backend used in the IPython Notebook will close all
792 792 matplotlib figures automatically after each cell is run. This means that
793 793 plots in different cells won't interfere. Sometimes, you may want to make
794 794 a plot in one cell and then refine it in later cells. This can be accomplished
795 795 by::
796 796
797 797 In [1]: set_matplotlib_close(False)
798 798
799 799 To set this in your config files use the following::
800 800
801 801 c.InlineBackend.close_figures = False
802 802
803 803 Parameters
804 804 ----------
805 805 close : bool
806 806 Should all matplotlib figures be automatically closed after each cell is
807 807 run?
808 808 """
809 809 from IPython.kernel.zmq.pylab.config import InlineBackend
810 810 cfg = InlineBackend.instance()
811 811 cfg.close_figures = close
812 812
@@ -1,179 +1,159 b''
1 1 """An interface for publishing rich data to frontends.
2 2
3 3 There are two components of the display system:
4 4
5 5 * Display formatters, which take a Python object and compute the
6 6 representation of the object in various formats (text, HTML, SVG, etc.).
7 7 * The display publisher that is used to send the representation data to the
8 8 various frontends.
9 9
10 10 This module defines the logic for display publishing. The display publisher uses
11 11 the ``display_data`` message type that is defined in the IPython messaging
12 12 spec.
13
14 Authors:
15
16 * Brian Granger
17 13 """
18 14
19 #-----------------------------------------------------------------------------
20 # Copyright (C) 2008-2011 The IPython Development Team
21 #
22 # Distributed under the terms of the BSD License. The full license is in
23 # the file COPYING, distributed as part of this software.
24 #-----------------------------------------------------------------------------
25
26 #-----------------------------------------------------------------------------
27 # Imports
28 #-----------------------------------------------------------------------------
15 # Copyright (c) IPython Development Team.
16 # Distributed under the terms of the Modified BSD License.
29 17
30 18 from __future__ import print_function
31 19
32 20 from IPython.config.configurable import Configurable
33 21 from IPython.utils import io
34 22 from IPython.utils.py3compat import string_types
35 23 from IPython.utils.traitlets import List
36 24
37 25 #-----------------------------------------------------------------------------
38 26 # Main payload class
39 27 #-----------------------------------------------------------------------------
40 28
41 29 class DisplayPublisher(Configurable):
42 30 """A traited class that publishes display data to frontends.
43 31
44 32 Instances of this class are created by the main IPython object and should
45 33 be accessed there.
46 34 """
47 35
48 def _validate_data(self, source, data, metadata=None):
36 def _validate_data(self, data, metadata=None):
49 37 """Validate the display data.
50 38
51 39 Parameters
52 40 ----------
53 source : str
54 The fully dotted name of the callable that created the data, like
55 :func:`foo.bar.my_formatter`.
56 41 data : dict
57 42 The format data dictionary.
58 43 metadata : dict
59 44 Any metadata for the data.
60 45 """
61 46
62 if not isinstance(source, string_types):
63 raise TypeError('source must be a str, got: %r' % source)
64 47 if not isinstance(data, dict):
65 48 raise TypeError('data must be a dict, got: %r' % data)
66 49 if metadata is not None:
67 50 if not isinstance(metadata, dict):
68 51 raise TypeError('metadata must be a dict, got: %r' % metadata)
69 52
70 def publish(self, source, data, metadata=None):
53 def publish(self, data, metadata=None, source=None):
71 54 """Publish data and metadata to all frontends.
72 55
73 56 See the ``display_data`` message in the messaging documentation for
74 57 more details about this message type.
75 58
76 59 The following MIME types are currently implemented:
77 60
78 61 * text/plain
79 62 * text/html
80 63 * text/markdown
81 64 * text/latex
82 65 * application/json
83 66 * application/javascript
84 67 * image/png
85 68 * image/jpeg
86 69 * image/svg+xml
87 70
88 71 Parameters
89 72 ----------
90 source : str
91 A string that give the function or method that created the data,
92 such as 'IPython.core.page'.
93 73 data : dict
94 74 A dictionary having keys that are valid MIME types (like
95 75 'text/plain' or 'image/svg+xml') and values that are the data for
96 76 that MIME type. The data itself must be a JSON'able data
97 77 structure. Minimally all data should have the 'text/plain' data,
98 78 which can be displayed by all frontends. If more than the plain
99 79 text is given, it is up to the frontend to decide which
100 80 representation to use.
101 81 metadata : dict
102 82 A dictionary for metadata related to the data. This can contain
103 83 arbitrary key, value pairs that frontends can use to interpret
104 84 the data. Metadata specific to each mime-type can be specified
105 85 in the metadata dict with the same mime-type keys as
106 86 the data itself.
87 source : str, deprecated
88 Unused.
107 89 """
108 90
109 91 # The default is to simply write the plain text data using io.stdout.
110 92 if 'text/plain' in data:
111 93 print(data['text/plain'], file=io.stdout)
112 94
113 95 def clear_output(self, wait=False):
114 96 """Clear the output of the cell receiving output."""
115 97 print('\033[2K\r', file=io.stdout, end='')
116 98 io.stdout.flush()
117 99 print('\033[2K\r', file=io.stderr, end='')
118 100 io.stderr.flush()
119 101
120 102
121 103 class CapturingDisplayPublisher(DisplayPublisher):
122 104 """A DisplayPublisher that stores"""
123 105 outputs = List()
124 106
125 def publish(self, source, data, metadata=None):
126 self.outputs.append((source, data, metadata))
107 def publish(self, data, metadata=None, source=None):
108 self.outputs.append((data, metadata))
127 109
128 110 def clear_output(self, wait=False):
129 111 super(CapturingDisplayPublisher, self).clear_output(wait)
130 112
131 113 # empty the list, *do not* reassign a new list
132 114 del self.outputs[:]
133 115
134 116
135 def publish_display_data(source, data, metadata=None):
117 def publish_display_data(data, metadata=None, source=None):
136 118 """Publish data and metadata to all frontends.
137 119
138 120 See the ``display_data`` message in the messaging documentation for
139 121 more details about this message type.
140 122
141 123 The following MIME types are currently implemented:
142 124
143 125 * text/plain
144 126 * text/html
145 127 * text/markdown
146 128 * text/latex
147 129 * application/json
148 130 * application/javascript
149 131 * image/png
150 132 * image/jpeg
151 133 * image/svg+xml
152 134
153 135 Parameters
154 136 ----------
155 source : str
156 A string that give the function or method that created the data,
157 such as 'IPython.core.page'.
158 137 data : dict
159 138 A dictionary having keys that are valid MIME types (like
160 139 'text/plain' or 'image/svg+xml') and values that are the data for
161 140 that MIME type. The data itself must be a JSON'able data
162 141 structure. Minimally all data should have the 'text/plain' data,
163 142 which can be displayed by all frontends. If more than the plain
164 143 text is given, it is up to the frontend to decide which
165 144 representation to use.
166 145 metadata : dict
167 146 A dictionary for metadata related to the data. This can contain
168 147 arbitrary key, value pairs that frontends can use to interpret
169 148 the data. mime-type keys matching those in data can be used
170 149 to specify metadata about particular representations.
150 source : str, deprecated
151 Unused.
171 152 """
172 153 from IPython.core.interactiveshell import InteractiveShell
173 154 InteractiveShell.instance().display_pub.publish(
174 source,
175 data,
176 metadata
155 data=data,
156 metadata=metadata,
177 157 )
178 158
179 159
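With the new signature, callers pass the mimebundle and per-mimetype metadata by keyword; `source` is accepted only for backward compatibility and ignored. A hedged sketch against the base publisher, which simply prints the `'text/plain'` entry (the strings are illustrative):

```python
from IPython.core.interactiveshell import InteractiveShell

shell = InteractiveShell.instance()
shell.display_pub.publish(
    data={'text/plain': '<Table: 3 rows>', 'text/html': '<table>...</table>'},
    metadata=None,
)
```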
@@ -1,401 +1,401 b''
1 1 """Test suite for our zeromq-based message specification."""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 import re
7 7 from distutils.version import LooseVersion as V
8 8 from subprocess import PIPE
9 9 try:
10 10 from queue import Empty # Py 3
11 11 except ImportError:
12 12 from Queue import Empty # Py 2
13 13
14 14 import nose.tools as nt
15 15
16 16 from IPython.kernel import KernelManager
17 17
18 18 from IPython.utils.traitlets import (
19 19 HasTraits, TraitError, Bool, Unicode, Dict, Integer, List, Enum, Any,
20 20 )
21 21 from IPython.utils.py3compat import string_types, iteritems
22 22
23 23 from .utils import TIMEOUT, start_global_kernel, flush_channels, execute
24 24
25 25 #-----------------------------------------------------------------------------
26 26 # Globals
27 27 #-----------------------------------------------------------------------------
28 28 KC = None
29 29
30 30 def setup():
31 31 global KC
32 32 KC = start_global_kernel()
33 33
34 34 #-----------------------------------------------------------------------------
35 35 # Message Spec References
36 36 #-----------------------------------------------------------------------------
37 37
38 38 class Reference(HasTraits):
39 39
40 40 """
41 41 Base class for message spec testing.
42 42
43 43 This class is the core of the message specification test. The
44 44 idea is that child classes implement trait attributes for each
45 45 message key, so that message keys can be tested against these
46 46 traits using the :meth:`check` method.
47 47
48 48 """
49 49
50 50 def check(self, d):
51 51 """validate a dict against our traits"""
52 52 for key in self.trait_names():
53 53 nt.assert_in(key, d)
54 54 # FIXME: always allow None, probably not a good idea
55 55 if d[key] is None:
56 56 continue
57 57 try:
58 58 setattr(self, key, d[key])
59 59 except TraitError as e:
60 60 assert False, str(e)
61 61
62 62 class Version(Unicode):
63 63 def validate(self, obj, value):
64 64 min_version = self.default_value
65 65 if V(value) < V(min_version):
66 66 raise TraitError("bad version: %s < %s" % (value, min_version))
67 67
68 68 class RMessage(Reference):
69 69 msg_id = Unicode()
70 70 msg_type = Unicode()
71 71 header = Dict()
72 72 parent_header = Dict()
73 73 content = Dict()
74 74
75 75 def check(self, d):
76 76 super(RMessage, self).check(d)
77 77 RHeader().check(self.header)
78 78 if self.parent_header:
79 79 RHeader().check(self.parent_header)
80 80
81 81 class RHeader(Reference):
82 82 msg_id = Unicode()
83 83 msg_type = Unicode()
84 84 session = Unicode()
85 85 username = Unicode()
86 86 version = Version('5.0')
87 87
88 88 mime_pat = re.compile(r'\w+/\w+')
89 89
90 90 class MimeBundle(Reference):
91 91 metadata = Dict()
92 92 data = Dict()
93 93 def _data_changed(self, name, old, new):
94 94 for k,v in iteritems(new):
95 95 assert mime_pat.match(k)
96 96 nt.assert_is_instance(v, string_types)
97 97
98 98 # shell replies
99 99
100 100 class ExecuteReply(Reference):
101 101 execution_count = Integer()
102 102 status = Enum((u'ok', u'error'))
103 103
104 104 def check(self, d):
105 105 Reference.check(self, d)
106 106 if d['status'] == 'ok':
107 107 ExecuteReplyOkay().check(d)
108 108 elif d['status'] == 'error':
109 109 ExecuteReplyError().check(d)
110 110
111 111
112 112 class ExecuteReplyOkay(Reference):
113 113 payload = List(Dict)
114 114 user_expressions = Dict()
115 115
116 116
117 117 class ExecuteReplyError(Reference):
118 118 ename = Unicode()
119 119 evalue = Unicode()
120 120 traceback = List(Unicode)
121 121
122 122
123 123 class OInfoReply(MimeBundle):
124 124 name = Unicode()
125 125 found = Bool()
126 126
127 127
128 128 class ArgSpec(Reference):
129 129 args = List(Unicode)
130 130 varargs = Unicode()
131 131 varkw = Unicode()
132 132 defaults = List()
133 133
134 134
135 135 class Status(Reference):
136 136 execution_state = Enum((u'busy', u'idle', u'starting'))
137 137
138 138
139 139 class CompleteReply(Reference):
140 140 matches = List(Unicode)
141 141
142 142
143 143 class KernelInfoReply(Reference):
144 144 protocol_version = Version('5.0')
145 145 implementation = Unicode('ipython')
146 146 implementation_version = Version('2.1')
147 147 language_version = Version('2.7')
148 148 language = Unicode('python')
149 149 banner = Unicode()
150 150
151 151
152 152 # IOPub messages
153 153
154 154 class ExecuteInput(Reference):
155 155 code = Unicode()
156 156 execution_count = Integer()
157 157
158 158
159 159 Error = ExecuteReplyError
160 160
161 161
162 162 class Stream(Reference):
163 163 name = Enum((u'stdout', u'stderr'))
164 164 data = Unicode()
165 165
166 166
167 167 class DisplayData(MimeBundle):
168 source = Unicode()
168 pass
169 169
170 170
171 171 class ExecuteResult(MimeBundle):
172 172 execution_count = Integer()
173 173
174 174
175 175 references = {
176 176 'execute_reply' : ExecuteReply(),
177 177 'object_info_reply' : OInfoReply(),
178 178 'status' : Status(),
179 179 'complete_reply' : CompleteReply(),
180 180 'kernel_info_reply': KernelInfoReply(),
181 181 'execute_input' : ExecuteInput(),
182 182 'execute_result' : ExecuteResult(),
183 183 'error' : Error(),
184 184 'stream' : Stream(),
185 185 'display_data' : DisplayData(),
186 186 'header' : RHeader(),
187 187 }
188 188 """
189 189 Specifications of `content` part of the reply messages.
190 190 """
191 191
192 192
193 193 def validate_message(msg, msg_type=None, parent=None):
194 194 """validate a message
195 195
196 196 This is a generator, and must be iterated through to actually
197 197 trigger each test.
198 198
199 199 If msg_type and/or parent are given, the msg_type and/or parent msg_id
200 200 are compared with the given values.
201 201 """
202 202 RMessage().check(msg)
203 203 if msg_type:
204 204 nt.assert_equal(msg['msg_type'], msg_type)
205 205 if parent:
206 206 nt.assert_equal(msg['parent_header']['msg_id'], parent)
207 207 content = msg['content']
208 208 ref = references[msg['msg_type']]
209 209 ref.check(content)
210 210
211 211
212 212 #-----------------------------------------------------------------------------
213 213 # Tests
214 214 #-----------------------------------------------------------------------------
215 215
216 216 # Shell channel
217 217
218 218 def test_execute():
219 219 flush_channels()
220 220
221 221 msg_id = KC.execute(code='x=1')
222 222 reply = KC.get_shell_msg(timeout=TIMEOUT)
223 223 validate_message(reply, 'execute_reply', msg_id)
224 224
225 225
226 226 def test_execute_silent():
227 227 flush_channels()
228 228 msg_id, reply = execute(code='x=1', silent=True)
229 229
230 230 # flush status=idle
231 231 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
232 232 validate_message(status, 'status', msg_id)
233 233 nt.assert_equal(status['content']['execution_state'], 'idle')
234 234
235 235 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
236 236 count = reply['execution_count']
237 237
238 238 msg_id, reply = execute(code='x=2', silent=True)
239 239
240 240 # flush status=idle
241 241 status = KC.iopub_channel.get_msg(timeout=TIMEOUT)
242 242 validate_message(status, 'status', msg_id)
243 243 nt.assert_equal(status['content']['execution_state'], 'idle')
244 244
245 245 nt.assert_raises(Empty, KC.iopub_channel.get_msg, timeout=0.1)
246 246 count_2 = reply['execution_count']
247 247 nt.assert_equal(count_2, count)
248 248
249 249
250 250 def test_execute_error():
251 251 flush_channels()
252 252
253 253 msg_id, reply = execute(code='1/0')
254 254 nt.assert_equal(reply['status'], 'error')
255 255 nt.assert_equal(reply['ename'], 'ZeroDivisionError')
256 256
257 257 error = KC.iopub_channel.get_msg(timeout=TIMEOUT)
258 258 validate_message(error, 'error', msg_id)
259 259
260 260
261 261 def test_execute_inc():
262 262 """execute request should increment execution_count"""
263 263 flush_channels()
264 264
265 265 msg_id, reply = execute(code='x=1')
266 266 count = reply['execution_count']
267 267
268 268 flush_channels()
269 269
270 270 msg_id, reply = execute(code='x=2')
271 271 count_2 = reply['execution_count']
272 272 nt.assert_equal(count_2, count+1)
273 273
274 274
275 275 def test_user_expressions():
276 276 flush_channels()
277 277
278 278 msg_id, reply = execute(code='x=1', user_expressions=dict(foo='x+1'))
279 279 user_expressions = reply['user_expressions']
280 280 nt.assert_equal(user_expressions, {u'foo': {
281 281 u'status': u'ok',
282 282 u'data': {u'text/plain': u'2'},
283 283 u'metadata': {},
284 284 }})
285 285
286 286
287 287 def test_user_expressions_fail():
288 288 flush_channels()
289 289
290 290 msg_id, reply = execute(code='x=0', user_expressions=dict(foo='nosuchname'))
291 291 user_expressions = reply['user_expressions']
292 292 foo = user_expressions['foo']
293 293 nt.assert_equal(foo['status'], 'error')
294 294 nt.assert_equal(foo['ename'], 'NameError')
295 295
296 296
297 297 def test_oinfo():
298 298 flush_channels()
299 299
300 300 msg_id = KC.object_info('a')
301 301 reply = KC.get_shell_msg(timeout=TIMEOUT)
302 302 validate_message(reply, 'object_info_reply', msg_id)
303 303
304 304
305 305 def test_oinfo_found():
306 306 flush_channels()
307 307
308 308 msg_id, reply = execute(code='a=5')
309 309
310 310 msg_id = KC.object_info('a')
311 311 reply = KC.get_shell_msg(timeout=TIMEOUT)
312 312 validate_message(reply, 'object_info_reply', msg_id)
313 313 content = reply['content']
314 314 assert content['found']
315 315 nt.assert_equal(content['name'], 'a')
316 316 text = content['data']['text/plain']
317 317 nt.assert_in('Type:', text)
318 318 nt.assert_in('Docstring:', text)
319 319
320 320
321 321 def test_oinfo_detail():
322 322 flush_channels()
323 323
324 324 msg_id, reply = execute(code='ip=get_ipython()')
325 325
326 326 msg_id = KC.object_info('ip.object_inspect', cursor_pos=10, detail_level=1)
327 327 reply = KC.get_shell_msg(timeout=TIMEOUT)
328 328 validate_message(reply, 'object_info_reply', msg_id)
329 329 content = reply['content']
330 330 assert content['found']
331 331 nt.assert_equal(content['name'], 'ip.object_inspect')
332 332 text = content['data']['text/plain']
333 333 nt.assert_in('Definition:', text)
334 334 nt.assert_in('Source:', text)
335 335
336 336
337 337 def test_oinfo_not_found():
338 338 flush_channels()
339 339
340 340 msg_id = KC.object_info('dne')
341 341 reply = KC.get_shell_msg(timeout=TIMEOUT)
342 342 validate_message(reply, 'object_info_reply', msg_id)
343 343 content = reply['content']
344 344 nt.assert_false(content['found'])
345 345
346 346
347 347 def test_complete():
348 348 flush_channels()
349 349
350 350 msg_id, reply = execute(code="alpha = albert = 5")
351 351
352 352 msg_id = KC.complete('al', 2)
353 353 reply = KC.get_shell_msg(timeout=TIMEOUT)
354 354 validate_message(reply, 'complete_reply', msg_id)
355 355 matches = reply['content']['matches']
356 356 for name in ('alpha', 'albert'):
357 357 nt.assert_in(name, matches)
358 358
359 359
360 360 def test_kernel_info_request():
361 361 flush_channels()
362 362
363 363 msg_id = KC.kernel_info()
364 364 reply = KC.get_shell_msg(timeout=TIMEOUT)
365 365 validate_message(reply, 'kernel_info_reply', msg_id)
366 366
367 367
368 368 def test_single_payload():
369 369 flush_channels()
370 370 msg_id, reply = execute(code="for i in range(3):\n"+
371 371 " x=range?\n")
372 372 payload = reply['payload']
373 373 next_input_pls = [pl for pl in payload if pl["source"] == "set_next_input"]
374 374 nt.assert_equal(len(next_input_pls), 1)
375 375
376 376
377 377 # IOPub channel
378 378
379 379
380 380 def test_stream():
381 381 flush_channels()
382 382
383 383 msg_id, reply = execute("print('hi')")
384 384
385 385 stdout = KC.iopub_channel.get_msg(timeout=TIMEOUT)
386 386 validate_message(stdout, 'stream', msg_id)
387 387 content = stdout['content']
388 388 nt.assert_equal(content['name'], u'stdout')
389 389 nt.assert_equal(content['data'], u'hi\n')
390 390
391 391
392 392 def test_display_data():
393 393 flush_channels()
394 394
395 395 msg_id, reply = execute("from IPython.core.display import display; display(1)")
396 396
397 397 display = KC.iopub_channel.get_msg(timeout=TIMEOUT)
398 398 validate_message(display, 'display_data', parent=msg_id)
399 399 data = display['content']['data']
400 400 nt.assert_equal(data['text/plain'], u'1')
401 401
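After this change the `display_data` content is just the mimebundle fields checked by `MimeBundle`/`DisplayData` above; the `source` key the old reference required is gone. Schematically, a conforming message content now looks like:

```python
# shape of display_data content validated by the spec test above
display_data_content = {
    'data': {'text/plain': u'1'},   # mimetype-keyed, values are strings
    'metadata': {},                 # optional per-mimetype metadata
}
```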
@@ -1,573 +1,572 b''
1 1 """A ZMQ-based subclass of InteractiveShell.
2 2
3 3 This code is meant to ease the refactoring of the base InteractiveShell into
4 4 something with a cleaner architecture for 2-process use, without actually
5 5 breaking InteractiveShell itself. So we're doing something a bit ugly, where
6 6 we subclass and override what we want to fix. Once this is working well, we
7 7 can go back to the base class and refactor the code for a cleaner inheritance
8 8 implementation that doesn't rely on so much monkeypatching.
9 9
10 10 But this lets us maintain a fully working IPython as we develop the new
11 11 machinery. This should thus be thought of as scaffolding.
12 12 """
13 13
14 14 # Copyright (c) IPython Development Team.
15 15 # Distributed under the terms of the Modified BSD License.
16 16
17 17 from __future__ import print_function
18 18
19 19 import os
20 20 import sys
21 21 import time
22 22
23 23 from zmq.eventloop import ioloop
24 24
25 25 from IPython.core.interactiveshell import (
26 26 InteractiveShell, InteractiveShellABC
27 27 )
28 28 from IPython.core import page
29 29 from IPython.core.autocall import ZMQExitAutocall
30 30 from IPython.core.displaypub import DisplayPublisher
31 31 from IPython.core.error import UsageError
32 32 from IPython.core.magics import MacroToEdit, CodeMagics
33 33 from IPython.core.magic import magics_class, line_magic, Magics
34 34 from IPython.core.payloadpage import install_payload_page
35 35 from IPython.core.usage import default_gui_banner
36 36 from IPython.display import display, Javascript
37 37 from IPython.kernel.inprocess.socket import SocketABC
38 38 from IPython.kernel import (
39 39 get_connection_file, get_connection_info, connect_qtconsole
40 40 )
41 41 from IPython.testing.skipdoctest import skip_doctest
42 42 from IPython.utils import openpy
43 43 from IPython.utils.jsonutil import json_clean, encode_images
44 44 from IPython.utils.process import arg_split
45 45 from IPython.utils import py3compat
46 46 from IPython.utils.py3compat import unicode_type
47 47 from IPython.utils.traitlets import Instance, Type, Dict, CBool, CBytes, Any
48 48 from IPython.utils.warn import error
49 49 from IPython.kernel.zmq.displayhook import ZMQShellDisplayHook
50 50 from IPython.kernel.zmq.datapub import ZMQDataPublisher
51 51 from IPython.kernel.zmq.session import extract_header
52 52 from IPython.kernel.comm import CommManager
53 53 from .session import Session
54 54
55 55 #-----------------------------------------------------------------------------
56 56 # Functions and classes
57 57 #-----------------------------------------------------------------------------
58 58
59 59 class ZMQDisplayPublisher(DisplayPublisher):
60 60 """A display publisher that publishes data using a ZeroMQ PUB socket."""
61 61
62 62 session = Instance(Session)
63 63 pub_socket = Instance(SocketABC)
64 64 parent_header = Dict({})
65 65 topic = CBytes(b'display_data')
66 66
67 67 def set_parent(self, parent):
68 68 """Set the parent for outbound messages."""
69 69 self.parent_header = extract_header(parent)
70 70
71 71 def _flush_streams(self):
72 72 """flush IO Streams prior to display"""
73 73 sys.stdout.flush()
74 74 sys.stderr.flush()
75 75
76 def publish(self, source, data, metadata=None):
76 def publish(self, data, metadata=None, source=None):
77 77 self._flush_streams()
78 78 if metadata is None:
79 79 metadata = {}
80 self._validate_data(source, data, metadata)
80 self._validate_data(data, metadata)
81 81 content = {}
82 content['source'] = source
83 82 content['data'] = encode_images(data)
84 83 content['metadata'] = metadata
85 84 self.session.send(
86 85 self.pub_socket, u'display_data', json_clean(content),
87 86 parent=self.parent_header, ident=self.topic,
88 87 )
89 88
90 89 def clear_output(self, wait=False):
91 90 content = dict(wait=wait)
92 91 self._flush_streams()
93 92 self.session.send(
94 93 self.pub_socket, u'clear_output', content,
95 94 parent=self.parent_header, ident=self.topic,
96 95 )
97 96
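The ZMQ publisher above now builds the IOPub `display_data` content from `data` and `metadata` only; the `source` field is no longer included. Roughly, the content it sends looks like this (values are illustrative):

```python
content = {
    'data': {'image/png': '<base64-encoded bytes>'},   # after encode_images()
    'metadata': {'image/png': {'width': 320}},         # hypothetical metadata
}
# self.session.send(self.pub_socket, u'display_data', json_clean(content), ...)
```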
98 97 @magics_class
99 98 class KernelMagics(Magics):
100 99 #------------------------------------------------------------------------
101 100 # Magic overrides
102 101 #------------------------------------------------------------------------
103 102 # Once the base class stops inheriting from magic, this code needs to be
104 103 # moved into a separate machinery as well. For now, at least isolate here
105 104 # the magics which this class needs to implement differently from the base
106 105 # class, or that are unique to it.
107 106
108 107 @line_magic
109 108 def doctest_mode(self, parameter_s=''):
110 109 """Toggle doctest mode on and off.
111 110
112 111 This mode is intended to make IPython behave as much as possible like a
113 112 plain Python shell, from the perspective of how its prompts, exceptions
114 113 and output look. This makes it easy to copy and paste parts of a
115 114 session into doctests. It does so by:
116 115
117 116 - Changing the prompts to the classic ``>>>`` ones.
118 117 - Changing the exception reporting mode to 'Plain'.
119 118 - Disabling pretty-printing of output.
120 119
121 120 Note that IPython also supports the pasting of code snippets that have
122 121 leading '>>>' and '...' prompts in them. This means that you can paste
123 122 doctests from files or docstrings (even if they have leading
124 123 whitespace), and the code will execute correctly. You can then use
125 124 '%history -t' to see the translated history; this will give you the
126 125 input after removal of all the leading prompts and whitespace, which
127 126 can be pasted back into an editor.
128 127
129 128 With these features, you can switch into this mode easily whenever you
130 129 need to do testing and changes to doctests, without having to leave
131 130 your existing IPython session.
132 131 """
133 132
134 133 from IPython.utils.ipstruct import Struct
135 134
136 135 # Shorthands
137 136 shell = self.shell
138 137 disp_formatter = self.shell.display_formatter
139 138 ptformatter = disp_formatter.formatters['text/plain']
140 139 # dstore is a data store kept in the instance metadata bag to track any
141 140 # changes we make, so we can undo them later.
142 141 dstore = shell.meta.setdefault('doctest_mode', Struct())
143 142 save_dstore = dstore.setdefault
144 143
145 144 # save a few values we'll need to recover later
146 145 mode = save_dstore('mode', False)
147 146 save_dstore('rc_pprint', ptformatter.pprint)
148 147 save_dstore('rc_active_types',disp_formatter.active_types)
149 148 save_dstore('xmode', shell.InteractiveTB.mode)
150 149
151 150 if mode == False:
152 151 # turn on
153 152 ptformatter.pprint = False
154 153 disp_formatter.active_types = ['text/plain']
155 154 shell.magic('xmode Plain')
156 155 else:
157 156 # turn off
158 157 ptformatter.pprint = dstore.rc_pprint
159 158 disp_formatter.active_types = dstore.rc_active_types
160 159 shell.magic("xmode " + dstore.xmode)
161 160
162 161 # Store new mode and inform on console
163 162 dstore.mode = bool(1-int(mode))
164 163 mode_label = ['OFF','ON'][dstore.mode]
165 164 print('Doctest mode is:', mode_label)
166 165
167 166 # Send the payload back so that clients can modify their prompt display
168 167 payload = dict(
169 168 source='doctest_mode',
170 169 mode=dstore.mode)
171 170 shell.payload_manager.write_payload(payload)
172 171
173 172
174 173 _find_edit_target = CodeMagics._find_edit_target
175 174
176 175 @skip_doctest
177 176 @line_magic
178 177 def edit(self, parameter_s='', last_call=['','']):
179 178 """Bring up an editor and execute the resulting code.
180 179
181 180 Usage:
182 181 %edit [options] [args]
183 182
184 183 %edit runs an external text editor. You will need to set the command for
185 184 this editor via the ``TerminalInteractiveShell.editor`` option in your
186 185 configuration file before it will work.
187 186
188 187 This command allows you to conveniently edit multi-line code right in
189 188 your IPython session.
190 189
191 190 If called without arguments, %edit opens up an empty editor with a
192 191 temporary file and will execute the contents of this file when you
193 192 close it (don't forget to save it!).
194 193
195 194 Options:
196 195
197 196 -n <number>
198 197 Open the editor at a specified line number. By default, the IPython
199 198 editor hook uses the unix syntax 'editor +N filename', but you can
200 199 configure this by providing your own modified hook if your favorite
201 200 editor supports line-number specifications with a different syntax.
202 201
203 202 -p
204 203 Call the editor with the same data as the previous time it was used,
205 204 regardless of how long ago (in your current session) it was.
206 205
207 206 -r
208 207 Use 'raw' input. This option only applies to input taken from the
209 208 user's history. By default, the 'processed' history is used, so that
210 209 magics are loaded in their transformed version, as valid Python. If
211 210 this option is given, the raw input as typed at the command line is
212 211 used instead. When you exit the editor, it will be executed by
213 212 IPython's own processor.
214 213
215 214 Arguments:
216 215
217 216 If arguments are given, the following possibilities exist:
218 217
219 218 - The arguments are numbers or pairs of colon-separated numbers (like
220 219 1 4:8 9). These are interpreted as lines of previous input to be
221 220 loaded into the editor. The syntax is the same as that of the %macro command.
222 221
223 222 - If the argument doesn't start with a number, it is evaluated as a
224 223 variable and its contents loaded into the editor. You can thus edit
225 224 any string which contains python code (including the result of
226 225 previous edits).
227 226
228 227 - If the argument is the name of an object (other than a string),
229 228 IPython will try to locate the file where it was defined and open the
230 229 editor at the point where it is defined. You can use ``%edit function``
231 230 to load an editor exactly at the point where 'function' is defined,
232 231 edit it and have the file be executed automatically.
233 232
234 233 If the object is a macro (see %macro for details), this opens up your
235 234 specified editor with a temporary file containing the macro's data.
236 235 Upon exit, the macro is reloaded with the contents of the file.
237 236
238 237 Note: opening at an exact line is only supported under Unix, and some
239 238 editors (like kedit and gedit up to Gnome 2.8) do not understand the
240 239 '+NUMBER' parameter necessary for this feature. Good editors like
241 240 (X)Emacs, vi, jed, pico and joe all do.
242 241
243 242 - If the argument is not found as a variable, IPython will look for a
244 243 file with that name (adding .py if necessary) and load it into the
245 244 editor. It will execute its contents with execfile() when you exit,
246 245 loading any code in the file into your interactive namespace.
247 246
248 247 Unlike in the terminal, this is designed to use a GUI editor, and we do
249 248 not know when it has closed. So the file you edit will not be
250 249 automatically executed or printed.
251 250
252 251 Note that %edit is also available through the alias %ed.
253 252 """
254 253
255 254 opts,args = self.parse_options(parameter_s,'prn:')
256 255
257 256 try:
258 257 filename, lineno, _ = CodeMagics._find_edit_target(self.shell, args, opts, last_call)
259 258 except MacroToEdit as e:
260 259 # TODO: Implement macro editing over 2 processes.
261 260 print("Macro editing not yet implemented in 2-process model.")
262 261 return
263 262
264 263 # Make sure we send to the client an absolute path, in case the working
265 264 # directory of client and kernel don't match
266 265 filename = os.path.abspath(filename)
267 266
268 267 payload = {
269 268 'source' : 'edit_magic',
270 269 'filename' : filename,
271 270 'line_number' : lineno
272 271 }
273 272 self.shell.payload_manager.write_payload(payload)
274 273
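For reference, a frontend handling this message would see a payload shaped like the following (field names come from the code above; the concrete values are invented):

```python
# Hypothetical example of an `edit_magic` payload as received by a frontend.
payload = {
    'source': 'edit_magic',
    'filename': '/home/user/scratch.py',   # always sent as an absolute path
    'line_number': 12,
}
```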
275 274 # A few magics that are adapted to the specifics of using pexpect and a
276 275 # remote terminal
277 276
278 277 @line_magic
279 278 def clear(self, arg_s):
280 279 """Clear the terminal."""
281 280 if os.name == 'posix':
282 281 self.shell.system("clear")
283 282 else:
284 283 self.shell.system("cls")
285 284
286 285 if os.name == 'nt':
287 286 # This is the usual name in windows
288 287 cls = line_magic('cls')(clear)
289 288
290 289 # Terminal pagers won't work over pexpect, but we do have our own pager
291 290
292 291 @line_magic
293 292 def less(self, arg_s):
294 293 """Show a file through the pager.
295 294
296 295 Files ending in .py are syntax-highlighted."""
297 296 if not arg_s:
298 297 raise UsageError('Missing filename.')
299 298
301 300 if arg_s.endswith('.py'):
302 301 cont = self.shell.pycolorize(openpy.read_py_file(arg_s, skip_encoding_cookie=False))
303 302 else:
304 303 cont = open(arg_s).read()
305 304 page.page(cont)
306 305
307 306 more = line_magic('more')(less)
308 307
309 308 # Man calls a pager, so we also need to redefine it
310 309 if os.name == 'posix':
311 310 @line_magic
312 311 def man(self, arg_s):
313 312 """Find the man page for the given command and display in pager."""
314 313 page.page(self.shell.getoutput('man %s | col -b' % arg_s,
315 314 split=False))
316 315
317 316 @line_magic
318 317 def connect_info(self, arg_s):
319 318 """Print information for connecting other clients to this kernel
320 319
321 320 It will print the contents of this session's connection file, as well as
322 321 shortcuts for local clients.
323 322
324 323 In the simplest case, when called from the most recently launched kernel,
325 324 secondary clients can be connected, simply with:
326 325
327 326 $> ipython <app> --existing
328 327
329 328 """
330 329
331 330 from IPython.core.application import BaseIPythonApplication as BaseIPApp
332 331
333 332 if BaseIPApp.initialized():
334 333 app = BaseIPApp.instance()
335 334 security_dir = app.profile_dir.security_dir
336 335 profile = app.profile
337 336 else:
338 337 profile = 'default'
339 338 security_dir = ''
340 339
341 340 try:
342 341 connection_file = get_connection_file()
343 342 info = get_connection_info(unpack=False)
344 343 except Exception as e:
345 344 error("Could not get connection info: %r" % e)
346 345 return
347 346
348 347 # add profile flag for non-default profile
349 348 profile_flag = "--profile %s" % profile if profile != 'default' else ""
350 349
351 350 # if it's in the security dir, truncate to basename
352 351 if security_dir == os.path.dirname(connection_file):
353 352 connection_file = os.path.basename(connection_file)
354 353
355 354
356 355 print (info + '\n')
357 356 print ("Paste the above JSON into a file, and connect with:\n"
358 357 " $> ipython <app> --existing <file>\n"
359 358 "or, if you are local, you can connect with just:\n"
360 359 " $> ipython <app> --existing {0} {1}\n"
361 360 "or even just:\n"
362 361 " $> ipython <app> --existing {1}\n"
363 362 "if this is the most recent IPython session you have started.".format(
364 363 connection_file, profile_flag
365 364 )
366 365 )
367 366
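The two helpers used above can also be called directly from kernel-side code. A hedged sketch, assuming they are imported from ``IPython.kernel.connect`` as in this generation of IPython:

```python
# Reproduce the core of %connect_info programmatically (assumed import path).
from IPython.kernel.connect import get_connection_file, get_connection_info

print(get_connection_file())               # path to this kernel's JSON file
print(get_connection_info(unpack=False))   # the JSON contents, as a string
```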
368 367 @line_magic
369 368 def qtconsole(self, arg_s):
370 369 """Open a qtconsole connected to this kernel.
371 370
372 371 Useful for connecting a qtconsole to running notebooks, for better
373 372 debugging.
374 373 """
375 374
376 375 # %qtconsole should imply bind_kernel for engines:
377 376 try:
378 377 from IPython.parallel import bind_kernel
379 378 except ImportError:
380 379 # technically possible, because parallel has higher pyzmq min-version
381 380 pass
382 381 else:
383 382 bind_kernel()
384 383
385 384 try:
386 385 p = connect_qtconsole(argv=arg_split(arg_s, os.name=='posix'))
387 386 except Exception as e:
388 387 error("Could not start qtconsole: %r" % e)
389 388 return
390 389
391 390 @line_magic
392 391 def autosave(self, arg_s):
393 392 """Set the autosave interval in the notebook (in seconds).
394 393
395 394 The default value is 120, or two minutes.
396 395 ``%autosave 0`` will disable autosave.
397 396
398 397 This magic only has an effect when called from the notebook interface.
399 398 It has no effect when called in a startup file.
400 399 """
401 400
402 401 try:
403 402 interval = int(arg_s)
404 403 except ValueError:
405 404 raise UsageError("%%autosave requires an integer, got %r" % arg_s)
406 405
407 406 # javascript wants milliseconds
408 407 milliseconds = 1000 * interval
409 408 display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
410 409 include=['application/javascript']
411 410 )
412 411 if interval:
413 412 print("Autosaving every %i seconds" % interval)
414 413 else:
415 414 print("Autosave disabled")
416 415
417 416
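In other words, ``%autosave 30`` reduces to something like the following sketch: the seconds-to-milliseconds conversion happens kernel-side before the value reaches the notebook's JavaScript API.

```python
from IPython.display import Javascript, display

interval = 30                      # seconds, as typed after %autosave
milliseconds = 1000 * interval     # javascript wants milliseconds
display(Javascript("IPython.notebook.set_autosave_interval(%i)" % milliseconds),
        include=['application/javascript'])
```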
418 417 class ZMQInteractiveShell(InteractiveShell):
419 418 """A subclass of InteractiveShell for ZMQ."""
420 419
421 420 displayhook_class = Type(ZMQShellDisplayHook)
422 421 display_pub_class = Type(ZMQDisplayPublisher)
423 422 data_pub_class = Type(ZMQDataPublisher)
424 423 kernel = Any()
425 424 parent_header = Any()
426 425
427 426 def _banner1_default(self):
428 427 return default_gui_banner
429 428
430 429 # Override the traitlet in the parent class, because there's no point using
431 430 # readline for the kernel. Can be removed when the readline code is moved
432 431 # to the terminal frontend.
433 432 colors_force = CBool(True)
434 433 readline_use = CBool(False)
435 434 # autoindent has no meaning in a zmqshell, and attempting to enable it
436 435 # will print a warning in the absence of readline.
437 436 autoindent = CBool(False)
438 437
439 438 exiter = Instance(ZMQExitAutocall)
440 439 def _exiter_default(self):
441 440 return ZMQExitAutocall(self)
442 441
443 442 def _exit_now_changed(self, name, old, new):
444 443 """stop eventloop when exit_now fires"""
445 444 if new:
446 445 loop = ioloop.IOLoop.instance()
447 446 loop.add_timeout(time.time()+0.1, loop.stop)
448 447
449 448 keepkernel_on_exit = None
450 449
451 450 # Over ZeroMQ, GUI control isn't done with PyOS_InputHook as there is no
452 451 # interactive input being read; we provide event loop support in ipkernel
453 452 @staticmethod
454 453 def enable_gui(gui):
455 454 from .eventloops import enable_gui as real_enable_gui
456 455 try:
457 456 real_enable_gui(gui)
458 457 except ValueError as e:
459 458 raise UsageError("%s" % e)
460 459
461 460 def init_environment(self):
462 461 """Configure the user's environment.
463 462
464 463 """
465 464 env = os.environ
466 465 # These two ensure 'ls' produces nice coloring on BSD-derived systems
467 466 env['TERM'] = 'xterm-color'
468 467 env['CLICOLOR'] = '1'
469 468 # Since normal pagers don't work at all (over pexpect we don't have
470 469 # single-key control of the subprocess), try to disable paging in
471 470 # subprocesses as much as possible.
472 471 env['PAGER'] = 'cat'
473 472 env['GIT_PAGER'] = 'cat'
474 473
475 474 # And install the payload version of page.
476 475 install_payload_page()
477 476
478 477 def auto_rewrite_input(self, cmd):
479 478 """Called to show the auto-rewritten input for autocall and friends.
480 479
481 480 FIXME: this payload is currently not correctly processed by the
482 481 frontend.
483 482 """
484 483 new = self.prompt_manager.render('rewrite') + cmd
485 484 payload = dict(
486 485 source='auto_rewrite_input',
487 486 transformed_input=new,
488 487 )
489 488 self.payload_manager.write_payload(payload)
490 489
491 490 def ask_exit(self):
492 491 """Engage the exit actions."""
493 492 self.exit_now = True
494 493 payload = dict(
495 494 source='ask_exit',
496 495 exit=True,
497 496 keepkernel=self.keepkernel_on_exit,
498 497 )
499 498 self.payload_manager.write_payload(payload)
500 499
501 500 def _showtraceback(self, etype, evalue, stb):
502 501 # try to preserve ordering of tracebacks and print statements
503 502 sys.stdout.flush()
504 503 sys.stderr.flush()
505 504
506 505 exc_content = {
507 506 u'traceback' : stb,
508 507 u'ename' : unicode_type(etype.__name__),
509 508 u'evalue' : py3compat.safe_unicode(evalue),
510 509 }
511 510
512 511 dh = self.displayhook
513 512 # Send exception info over pub socket for other clients than the caller
514 513 # to pick up
515 514 topic = None
516 515 if dh.topic:
517 516 topic = dh.topic.replace(b'execute_result', b'error')
518 517
519 518 exc_msg = dh.session.send(dh.pub_socket, u'error', json_clean(exc_content), dh.parent_header, ident=topic)
520 519
521 520 # FIXME - Hack: store exception info in shell object. Right now, the
522 521 # caller is reading this info after the fact, we need to fix this logic
523 522 # to remove this hack. Even uglier, we need to store the error status
524 523 # here, because in the main loop, the logic that sets it is being
525 524 # skipped because runlines swallows the exceptions.
526 525 exc_content[u'status'] = u'error'
527 526 self._reply_content = exc_content
528 527 # /FIXME
529 528
530 529 return exc_content
531 530
532 531 def set_next_input(self, text):
533 532 """Send the specified text to the frontend to be presented at the next
534 533 input cell."""
535 534 payload = dict(
536 535 source='set_next_input',
537 536 text=text
538 537 )
539 538 self.payload_manager.write_payload(payload)
540 539
541 540 def set_parent(self, parent):
542 541 """Set the parent header for associating output with its triggering input"""
543 542 self.parent_header = parent
544 543 self.displayhook.set_parent(parent)
545 544 self.display_pub.set_parent(parent)
546 545 self.data_pub.set_parent(parent)
547 546 try:
548 547 sys.stdout.set_parent(parent)
549 548 except AttributeError:
550 549 pass
551 550 try:
552 551 sys.stderr.set_parent(parent)
553 552 except AttributeError:
554 553 pass
555 554
556 555 def get_parent(self):
557 556 return self.parent_header
558 557
559 558 #-------------------------------------------------------------------------
560 559 # Things related to magics
561 560 #-------------------------------------------------------------------------
562 561
563 562 def init_magics(self):
564 563 super(ZMQInteractiveShell, self).init_magics()
565 564 self.register_magics(KernelMagics)
566 565 self.magics_manager.register_alias('ed', 'edit')
567 566
568 567 def init_comms(self):
569 568 self.comm_manager = CommManager(shell=self, parent=self)
570 569 self.configurables.append(self.comm_manager)
571 570
572 571
573 572 InteractiveShellABC.register(ZMQInteractiveShell)
@@ -1,1863 +1,1863 b''
1 1 """A semi-synchronous Client for IPython parallel"""
2 2
3 3 # Copyright (c) IPython Development Team.
4 4 # Distributed under the terms of the Modified BSD License.
5 5
6 6 from __future__ import print_function
7 7
8 8 import os
9 9 import json
10 10 import sys
11 11 from threading import Thread, Event
12 12 import time
13 13 import warnings
14 14 from datetime import datetime
15 15 from getpass import getpass
16 16 from pprint import pprint
17 17
18 18 pjoin = os.path.join
19 19
20 20 import zmq
21 21
22 22 from IPython.config.configurable import MultipleInstanceError
23 23 from IPython.core.application import BaseIPythonApplication
24 24 from IPython.core.profiledir import ProfileDir, ProfileDirError
25 25
26 26 from IPython.utils.capture import RichOutput
27 27 from IPython.utils.coloransi import TermColors
28 28 from IPython.utils.jsonutil import rekey, extract_dates, parse_date
29 29 from IPython.utils.localinterfaces import localhost, is_local_ip
30 30 from IPython.utils.path import get_ipython_dir
31 31 from IPython.utils.py3compat import cast_bytes, string_types, xrange, iteritems
32 32 from IPython.utils.traitlets import (HasTraits, Integer, Instance, Unicode,
33 33 Dict, List, Bool, Set, Any)
34 34 from IPython.external.decorator import decorator
35 35 from IPython.external.ssh import tunnel
36 36
37 37 from IPython.parallel import Reference
38 38 from IPython.parallel import error
39 39 from IPython.parallel import util
40 40
41 41 from IPython.kernel.zmq.session import Session, Message
42 42 from IPython.kernel.zmq import serialize
43 43
44 44 from .asyncresult import AsyncResult, AsyncHubResult
45 45 from .view import DirectView, LoadBalancedView
46 46
47 47 #--------------------------------------------------------------------------
48 48 # Decorators for Client methods
49 49 #--------------------------------------------------------------------------
50 50
51 51 @decorator
52 52 def spin_first(f, self, *args, **kwargs):
53 53 """Call spin() to sync state prior to calling the method."""
54 54 self.spin()
55 55 return f(self, *args, **kwargs)
56 56
57 57
58 58 #--------------------------------------------------------------------------
59 59 # Classes
60 60 #--------------------------------------------------------------------------
61 61
62 62
63 63 class ExecuteReply(RichOutput):
64 64 """wrapper for finished Execute results"""
65 65 def __init__(self, msg_id, content, metadata):
66 66 self.msg_id = msg_id
67 67 self._content = content
68 68 self.execution_count = content['execution_count']
69 69 self.metadata = metadata
70 70
71 71 # RichOutput overrides
72 72
73 73 @property
74 74 def source(self):
75 75 execute_result = self.metadata['execute_result']
76 76 if execute_result:
77 77 return execute_result.get('source', '')
78 78
79 79 @property
80 80 def data(self):
81 81 execute_result = self.metadata['execute_result']
82 82 if execute_result:
83 83 return execute_result.get('data', {})
84 84
85 85 @property
86 86 def _metadata(self):
87 87 execute_result = self.metadata['execute_result']
88 88 if execute_result:
89 89 return execute_result.get('metadata', {})
90 90
91 91 def display(self):
92 92 from IPython.display import publish_display_data
93 publish_display_data(self.source, self.data, self.metadata)
93 publish_display_data(self.data, self.metadata)
94 94
95 95 def _repr_mime_(self, mime):
96 96 if mime not in self.data:
97 97 return
98 98 data = self.data[mime]
99 99 if mime in self._metadata:
100 100 return data, self._metadata[mime]
101 101 else:
102 102 return data
103 103
104 104 def __getitem__(self, key):
105 105 return self.metadata[key]
106 106
107 107 def __getattr__(self, key):
108 108 if key not in self.metadata:
109 109 raise AttributeError(key)
110 110 return self.metadata[key]
111 111
112 112 def __repr__(self):
113 113 execute_result = self.metadata['execute_result'] or {'data':{}}
114 114 text_out = execute_result['data'].get('text/plain', '')
115 115 if len(text_out) > 32:
116 116 text_out = text_out[:29] + '...'
117 117
118 118 return "<ExecuteReply[%i]: %s>" % (self.execution_count, text_out)
119 119
120 120 def _repr_pretty_(self, p, cycle):
121 121 execute_result = self.metadata['execute_result'] or {'data':{}}
122 122 text_out = execute_result['data'].get('text/plain', '')
123 123
124 124 if not text_out:
125 125 return
126 126
127 127 try:
128 128 ip = get_ipython()
129 129 except NameError:
130 130 colors = "NoColor"
131 131 else:
132 132 colors = ip.colors
133 133
134 134 if colors == "NoColor":
135 135 out = normal = ""
136 136 else:
137 137 out = TermColors.Red
138 138 normal = TermColors.Normal
139 139
140 140 if '\n' in text_out and not text_out.startswith('\n'):
141 141 # add newline for multiline reprs
142 142 text_out = '\n' + text_out
143 143
144 144 p.text(
145 145 out + u'Out[%i:%i]: ' % (
146 146 self.metadata['engine_id'], self.execution_count
147 147 ) + normal + text_out
148 148 )
149 149
150 150
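The ``display()`` method above shows the point of this changeset: ``publish_display_data`` now takes the mimetype-keyed data dict (and optional metadata dict) directly, with no separate ``source`` string. A hedged usage sketch with invented values:

```python
from IPython.display import publish_display_data

# data and metadata are both keyed by mimetype; the 'isolated' key is just an
# example of per-mimetype metadata and is not required.
publish_display_data(
    {'text/plain': 'x = 1', 'text/html': '<b>x = 1</b>'},
    {'text/html': {'isolated': False}},
)
```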
151 151 class Metadata(dict):
152 152 """Subclass of dict for initializing metadata values.
153 153
154 154 Attribute access works on keys.
155 155
156 156 These objects have a strict set of keys; an error is raised if you try
157 157 to add new keys.
158 158 """
159 159 def __init__(self, *args, **kwargs):
160 160 dict.__init__(self)
161 161 md = {'msg_id' : None,
162 162 'submitted' : None,
163 163 'started' : None,
164 164 'completed' : None,
165 165 'received' : None,
166 166 'engine_uuid' : None,
167 167 'engine_id' : None,
168 168 'follow' : None,
169 169 'after' : None,
170 170 'status' : None,
171 171
172 172 'execute_input' : None,
173 173 'execute_result' : None,
174 174 'error' : None,
175 175 'stdout' : '',
176 176 'stderr' : '',
177 177 'outputs' : [],
178 178 'data': {},
179 179 'outputs_ready' : False,
180 180 }
181 181 self.update(md)
182 182 self.update(dict(*args, **kwargs))
183 183
184 184 def __getattr__(self, key):
185 185 """getattr aliased to getitem"""
186 186 if key in self:
187 187 return self[key]
188 188 else:
189 189 raise AttributeError(key)
190 190
191 191 def __setattr__(self, key, value):
192 192 """setattr aliased to setitem, with strict"""
193 193 if key in self:
194 194 self[key] = value
195 195 else:
196 196 raise AttributeError(key)
197 197
198 198 def __setitem__(self, key, value):
199 199 """strict static key enforcement"""
200 200 if key in self:
201 201 dict.__setitem__(self, key, value)
202 202 else:
203 203 raise KeyError(key)
204 204
205 205
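A small illustration of the strict-key behaviour described in the docstring above:

```python
# Known keys can be read and written via item or attribute access; unknown
# keys are rejected instead of being silently added.
md = Metadata(status='ok')
md.engine_id = 0                  # attribute write maps onto an existing key
print(md['status'], md.engine_id)

try:
    md['not_a_key'] = 1           # raises KeyError: not in the fixed key set
except KeyError:
    pass
```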
206 206 class Client(HasTraits):
207 207 """A semi-synchronous client to the IPython ZMQ cluster
208 208
209 209 Parameters
210 210 ----------
211 211
212 212 url_file : str/unicode; path to ipcontroller-client.json
213 213 This JSON file should contain all the information needed to connect to a cluster,
214 214 and is likely the only argument needed.
215 215 Connection information for the Hub's registration. If a json connector
216 216 file is given, then likely no further configuration is necessary.
217 217 [Default: use profile]
218 218 profile : bytes
219 219 The name of the Cluster profile to be used to find connector information.
220 220 If run from an IPython application, the default profile will be the same
221 221 as the running application, otherwise it will be 'default'.
222 222 cluster_id : str
223 223 String id to be added to runtime files, to prevent name collisions when using
224 224 multiple clusters with a single profile simultaneously.
225 225 When set, will look for files named like: 'ipcontroller-<cluster_id>-client.json'
226 226 Since this is text inserted into filenames, typical recommendations apply:
227 227 Simple character strings are ideal, and spaces are not recommended (but
228 228 should generally work)
229 229 context : zmq.Context
230 230 Pass an existing zmq.Context instance, otherwise the client will create its own.
231 231 debug : bool
232 232 flag for lots of message printing for debug purposes
233 233 timeout : int/float
234 234 time (in seconds) to wait for connection replies from the Hub
235 235 [Default: 10]
236 236
237 237 #-------------- session related args ----------------
238 238
239 239 config : Config object
240 240 If specified, this will be relayed to the Session for configuration
241 241 username : str
242 242 set username for the session object
243 243
244 244 #-------------- ssh related args ----------------
245 245 # These are args for configuring the ssh tunnel to be used
246 246 # credentials are used to forward connections over ssh to the Controller
247 247 # Note that the ip given in `addr` needs to be relative to sshserver
248 248 # The most basic case is to leave addr as pointing to localhost (127.0.0.1),
249 249 # and set sshserver as the same machine the Controller is on. However,
250 250 # the only requirement is that sshserver is able to see the Controller
251 251 # (i.e. is within the same trusted network).
252 252
253 253 sshserver : str
254 254 A string of the form passed to ssh, i.e. 'server.tld' or 'user@server.tld:port'
255 255 If keyfile or password is specified, and this is not, it will default to
256 256 the ip given in addr.
257 257 sshkey : str; path to ssh private key file
258 258 This specifies a key to be used in ssh login, default None.
259 259 Regular default ssh keys will be used without specifying this argument.
260 260 password : str
261 261 Your ssh password to sshserver. Note that if this is left None,
262 262 you will be prompted for it if passwordless key based login is unavailable.
263 263 paramiko : bool
264 264 flag for whether to use paramiko instead of shell ssh for tunneling.
265 265 [default: True on win32, False otherwise]
266 266
267 267
268 268 Attributes
269 269 ----------
270 270
271 271 ids : list of int engine IDs
272 272 requesting the ids attribute always synchronizes
273 273 the registration state. To request ids without synchronization,
274 274 use the semi-private _ids attribute.
275 275
276 276 history : list of msg_ids
277 277 a list of msg_ids, keeping track of all the execution
278 278 messages you have submitted in order.
279 279
280 280 outstanding : set of msg_ids
281 281 a set of msg_ids that have been submitted, but whose
282 282 results have not yet been received.
283 283
284 284 results : dict
285 285 a dict of all our results, keyed by msg_id
286 286
287 287 block : bool
288 288 determines default behavior when block not specified
289 289 in execution methods
290 290
291 291 Methods
292 292 -------
293 293
294 294 spin
295 295 flushes incoming results and registration state changes
296 296 control methods spin, and requesting `ids` also ensures the state is up to date
297 297
298 298 wait
299 299 wait on one or more msg_ids
300 300
301 301 execution methods
302 302 apply
303 303 legacy: execute, run
304 304
305 305 data movement
306 306 push, pull, scatter, gather
307 307
308 308 query methods
309 309 queue_status, get_result, purge, result_status
310 310
311 311 control methods
312 312 abort, shutdown
313 313
314 314 """
315 315
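A hedged sketch of typical constructions based on the parameters documented above; the paths and host names are purely illustrative:

```python
from IPython.parallel import Client

rc = Client()                                 # default profile's client JSON
rc = Client(profile='mycluster')              # a named cluster profile
rc = Client('/path/to/ipcontroller-client.json',
            sshserver='user@gateway.example.com',   # tunnel if the Hub is remote
            timeout=30)
```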
316 316
317 317 block = Bool(False)
318 318 outstanding = Set()
319 319 results = Instance('collections.defaultdict', (dict,))
320 320 metadata = Instance('collections.defaultdict', (Metadata,))
321 321 history = List()
322 322 debug = Bool(False)
323 323 _spin_thread = Any()
324 324 _stop_spinning = Any()
325 325
326 326 profile=Unicode()
327 327 def _profile_default(self):
328 328 if BaseIPythonApplication.initialized():
329 329 # an IPython app *might* be running, try to get its profile
330 330 try:
331 331 return BaseIPythonApplication.instance().profile
332 332 except (AttributeError, MultipleInstanceError):
333 333 # could be a *different* subclass of config.Application,
334 334 # which would raise one of these two errors.
335 335 return u'default'
336 336 else:
337 337 return u'default'
338 338
339 339
340 340 _outstanding_dict = Instance('collections.defaultdict', (set,))
341 341 _ids = List()
342 342 _connected=Bool(False)
343 343 _ssh=Bool(False)
344 344 _context = Instance('zmq.Context')
345 345 _config = Dict()
346 346 _engines=Instance(util.ReverseDict, (), {})
347 347 # _hub_socket=Instance('zmq.Socket')
348 348 _query_socket=Instance('zmq.Socket')
349 349 _control_socket=Instance('zmq.Socket')
350 350 _iopub_socket=Instance('zmq.Socket')
351 351 _notification_socket=Instance('zmq.Socket')
352 352 _mux_socket=Instance('zmq.Socket')
353 353 _task_socket=Instance('zmq.Socket')
354 354 _task_scheme=Unicode()
355 355 _closed = False
356 356 _ignored_control_replies=Integer(0)
357 357 _ignored_hub_replies=Integer(0)
358 358
359 359 def __new__(self, *args, **kw):
360 360 # don't raise on positional args
361 361 return HasTraits.__new__(self, **kw)
362 362
363 363 def __init__(self, url_file=None, profile=None, profile_dir=None, ipython_dir=None,
364 364 context=None, debug=False,
365 365 sshserver=None, sshkey=None, password=None, paramiko=None,
366 366 timeout=10, cluster_id=None, **extra_args
367 367 ):
368 368 if profile:
369 369 super(Client, self).__init__(debug=debug, profile=profile)
370 370 else:
371 371 super(Client, self).__init__(debug=debug)
372 372 if context is None:
373 373 context = zmq.Context.instance()
374 374 self._context = context
375 375 self._stop_spinning = Event()
376 376
377 377 if 'url_or_file' in extra_args:
378 378 url_file = extra_args['url_or_file']
379 379 warnings.warn("url_or_file arg no longer supported, use url_file", DeprecationWarning)
380 380
381 381 if url_file and util.is_url(url_file):
382 382 raise ValueError("single urls cannot be specified, url-files must be used.")
383 383
384 384 self._setup_profile_dir(self.profile, profile_dir, ipython_dir)
385 385
386 386 if self._cd is not None:
387 387 if url_file is None:
388 388 if not cluster_id:
389 389 client_json = 'ipcontroller-client.json'
390 390 else:
391 391 client_json = 'ipcontroller-%s-client.json' % cluster_id
392 392 url_file = pjoin(self._cd.security_dir, client_json)
393 393 if url_file is None:
394 394 raise ValueError(
395 395 "I can't find enough information to connect to a hub!"
396 396 " Please specify at least one of url_file or profile."
397 397 )
398 398
399 399 with open(url_file) as f:
400 400 cfg = json.load(f)
401 401
402 402 self._task_scheme = cfg['task_scheme']
403 403
404 404 # sync defaults from args, json:
405 405 if sshserver:
406 406 cfg['ssh'] = sshserver
407 407
408 408 location = cfg.setdefault('location', None)
409 409
410 410 proto,addr = cfg['interface'].split('://')
411 411 addr = util.disambiguate_ip_address(addr, location)
412 412 cfg['interface'] = "%s://%s" % (proto, addr)
413 413
414 414 # turn interface,port into full urls:
415 415 for key in ('control', 'task', 'mux', 'iopub', 'notification', 'registration'):
416 416 cfg[key] = cfg['interface'] + ':%i' % cfg[key]
417 417
418 418 url = cfg['registration']
419 419
420 420 if location is not None and addr == localhost():
421 421 # location specified, and connection is expected to be local
422 422 if not is_local_ip(location) and not sshserver:
423 423 # load ssh from JSON *only* if the controller is not on
424 424 # this machine
425 425 sshserver=cfg['ssh']
426 426 if not is_local_ip(location) and not sshserver:
427 427 # warn if no ssh specified, but SSH is probably needed
428 428 # This is only a warning, because the most likely cause
429 429 # is a local Controller on a laptop whose IP is dynamic
430 430 warnings.warn("""
431 431 Controller appears to be listening on localhost, but not on this machine.
432 432 If this is true, you should specify Client(...,sshserver='you@%s')
433 433 or instruct your controller to listen on an external IP."""%location,
434 434 RuntimeWarning)
435 435 elif not sshserver:
436 436 # otherwise sync with cfg
437 437 sshserver = cfg['ssh']
438 438
439 439 self._config = cfg
440 440
441 441 self._ssh = bool(sshserver or sshkey or password)
442 442 if self._ssh and sshserver is None:
443 443 # default to ssh via localhost
444 444 sshserver = addr
445 445 if self._ssh and password is None:
446 446 if tunnel.try_passwordless_ssh(sshserver, sshkey, paramiko):
447 447 password=False
448 448 else:
449 449 password = getpass("SSH Password for %s: "%sshserver)
450 450 ssh_kwargs = dict(keyfile=sshkey, password=password, paramiko=paramiko)
451 451
452 452 # configure and construct the session
453 453 try:
454 454 extra_args['packer'] = cfg['pack']
455 455 extra_args['unpacker'] = cfg['unpack']
456 456 extra_args['key'] = cast_bytes(cfg['key'])
457 457 extra_args['signature_scheme'] = cfg['signature_scheme']
458 458 except KeyError as exc:
459 459 msg = '\n'.join([
460 460 "Connection file is invalid (missing '{}'), possibly from an old version of IPython.",
461 461 "If you are reusing connection files, remove them and start ipcontroller again."
462 462 ])
463 463 raise ValueError(msg.format(exc.message))
464 464
465 465 self.session = Session(**extra_args)
466 466
467 467 self._query_socket = self._context.socket(zmq.DEALER)
468 468
469 469 if self._ssh:
470 470 tunnel.tunnel_connection(self._query_socket, cfg['registration'], sshserver, **ssh_kwargs)
471 471 else:
472 472 self._query_socket.connect(cfg['registration'])
473 473
474 474 self.session.debug = self.debug
475 475
476 476 self._notification_handlers = {'registration_notification' : self._register_engine,
477 477 'unregistration_notification' : self._unregister_engine,
478 478 'shutdown_notification' : lambda msg: self.close(),
479 479 }
480 480 self._queue_handlers = {'execute_reply' : self._handle_execute_reply,
481 481 'apply_reply' : self._handle_apply_reply}
482 482
483 483 try:
484 484 self._connect(sshserver, ssh_kwargs, timeout)
485 485 except:
486 486 self.close(linger=0)
487 487 raise
488 488
489 489 # last step: setup magics, if we are in IPython:
490 490
491 491 try:
492 492 ip = get_ipython()
493 493 except NameError:
494 494 return
495 495 else:
496 496 if 'px' not in ip.magics_manager.magics:
497 497 # in IPython but we are the first Client.
498 498 # activate a default view for parallel magics.
499 499 self.activate()
500 500
501 501 def __del__(self):
502 502 """cleanup sockets, but _not_ context."""
503 503 self.close()
504 504
505 505 def _setup_profile_dir(self, profile, profile_dir, ipython_dir):
506 506 if ipython_dir is None:
507 507 ipython_dir = get_ipython_dir()
508 508 if profile_dir is not None:
509 509 try:
510 510 self._cd = ProfileDir.find_profile_dir(profile_dir)
511 511 return
512 512 except ProfileDirError:
513 513 pass
514 514 elif profile is not None:
515 515 try:
516 516 self._cd = ProfileDir.find_profile_dir_by_name(
517 517 ipython_dir, profile)
518 518 return
519 519 except ProfileDirError:
520 520 pass
521 521 self._cd = None
522 522
523 523 def _update_engines(self, engines):
524 524 """Update our engines dict and _ids from a dict of the form: {id:uuid}."""
525 525 for k,v in iteritems(engines):
526 526 eid = int(k)
527 527 if eid not in self._engines:
528 528 self._ids.append(eid)
529 529 self._engines[eid] = v
530 530 self._ids = sorted(self._ids)
531 531 if sorted(self._engines.keys()) != list(range(len(self._engines))) and \
532 532 self._task_scheme == 'pure' and self._task_socket:
533 533 self._stop_scheduling_tasks()
534 534
535 535 def _stop_scheduling_tasks(self):
536 536 """Stop scheduling tasks because an engine has been unregistered
537 537 from a pure ZMQ scheduler.
538 538 """
539 539 self._task_socket.close()
540 540 self._task_socket = None
541 541 msg = "An engine has been unregistered, and we are using pure " +\
542 542 "ZMQ task scheduling. Task farming will be disabled."
543 543 if self.outstanding:
544 544 msg += " If you were running tasks when this happened, " +\
545 545 "some `outstanding` msg_ids may never resolve."
546 546 warnings.warn(msg, RuntimeWarning)
547 547
548 548 def _build_targets(self, targets):
549 549 """Turn valid target IDs or 'all' into two lists:
550 550 (uuids, int_ids).
551 551 """
552 552 if not self._ids:
553 553 # flush notification socket if no engines yet, just in case
554 554 if not self.ids:
555 555 raise error.NoEnginesRegistered("Can't build targets without any engines")
556 556
557 557 if targets is None:
558 558 targets = self._ids
559 559 elif isinstance(targets, string_types):
560 560 if targets.lower() == 'all':
561 561 targets = self._ids
562 562 else:
563 563 raise TypeError("%r not valid str target, must be 'all'"%(targets))
564 564 elif isinstance(targets, int):
565 565 if targets < 0:
566 566 targets = self.ids[targets]
567 567 if targets not in self._ids:
568 568 raise IndexError("No such engine: %i"%targets)
569 569 targets = [targets]
570 570
571 571 if isinstance(targets, slice):
572 572 indices = list(range(len(self._ids))[targets])
573 573 ids = self.ids
574 574 targets = [ ids[i] for i in indices ]
575 575
576 576 if not isinstance(targets, (tuple, list, xrange)):
577 577 raise TypeError("targets by int/slice/collection of ints only, not %s"%(type(targets)))
578 578
579 579 return [cast_bytes(self._engines[t]) for t in targets], list(targets)
580 580
581 581 def _connect(self, sshserver, ssh_kwargs, timeout):
582 582 """setup all our socket connections to the cluster. This is called from
583 583 __init__."""
584 584
585 585 # Maybe allow reconnecting?
586 586 if self._connected:
587 587 return
588 588 self._connected=True
589 589
590 590 def connect_socket(s, url):
591 591 if self._ssh:
592 592 return tunnel.tunnel_connection(s, url, sshserver, **ssh_kwargs)
593 593 else:
594 594 return s.connect(url)
595 595
596 596 self.session.send(self._query_socket, 'connection_request')
597 597 # use Poller because zmq.select has wrong units in pyzmq 2.1.7
598 598 poller = zmq.Poller()
599 599 poller.register(self._query_socket, zmq.POLLIN)
600 600 # poll expects milliseconds, timeout is seconds
601 601 evts = poller.poll(timeout*1000)
602 602 if not evts:
603 603 raise error.TimeoutError("Hub connection request timed out")
604 604 idents,msg = self.session.recv(self._query_socket,mode=0)
605 605 if self.debug:
606 606 pprint(msg)
607 607 content = msg['content']
608 608 # self._config['registration'] = dict(content)
609 609 cfg = self._config
610 610 if content['status'] == 'ok':
611 611 self._mux_socket = self._context.socket(zmq.DEALER)
612 612 connect_socket(self._mux_socket, cfg['mux'])
613 613
614 614 self._task_socket = self._context.socket(zmq.DEALER)
615 615 connect_socket(self._task_socket, cfg['task'])
616 616
617 617 self._notification_socket = self._context.socket(zmq.SUB)
618 618 self._notification_socket.setsockopt(zmq.SUBSCRIBE, b'')
619 619 connect_socket(self._notification_socket, cfg['notification'])
620 620
621 621 self._control_socket = self._context.socket(zmq.DEALER)
622 622 connect_socket(self._control_socket, cfg['control'])
623 623
624 624 self._iopub_socket = self._context.socket(zmq.SUB)
625 625 self._iopub_socket.setsockopt(zmq.SUBSCRIBE, b'')
626 626 connect_socket(self._iopub_socket, cfg['iopub'])
627 627
628 628 self._update_engines(dict(content['engines']))
629 629 else:
630 630 self._connected = False
631 631 raise Exception("Failed to connect!")
632 632
633 633 #--------------------------------------------------------------------------
634 634 # handlers and callbacks for incoming messages
635 635 #--------------------------------------------------------------------------
636 636
637 637 def _unwrap_exception(self, content):
638 638 """unwrap exception, and remap engine_id to int."""
639 639 e = error.unwrap_exception(content)
640 640 # print e.traceback
641 641 if e.engine_info:
642 642 e_uuid = e.engine_info['engine_uuid']
643 643 eid = self._engines[e_uuid]
644 644 e.engine_info['engine_id'] = eid
645 645 return e
646 646
647 647 def _extract_metadata(self, msg):
648 648 header = msg['header']
649 649 parent = msg['parent_header']
650 650 msg_meta = msg['metadata']
651 651 content = msg['content']
652 652 md = {'msg_id' : parent['msg_id'],
653 653 'received' : datetime.now(),
654 654 'engine_uuid' : msg_meta.get('engine', None),
655 655 'follow' : msg_meta.get('follow', []),
656 656 'after' : msg_meta.get('after', []),
657 657 'status' : content['status'],
658 658 }
659 659
660 660 if md['engine_uuid'] is not None:
661 661 md['engine_id'] = self._engines.get(md['engine_uuid'], None)
662 662
663 663 if 'date' in parent:
664 664 md['submitted'] = parent['date']
665 665 if 'started' in msg_meta:
666 666 md['started'] = parse_date(msg_meta['started'])
667 667 if 'date' in header:
668 668 md['completed'] = header['date']
669 669 return md
670 670
671 671 def _register_engine(self, msg):
672 672 """Register a new engine, and update our connection info."""
673 673 content = msg['content']
674 674 eid = content['id']
675 675 d = {eid : content['uuid']}
676 676 self._update_engines(d)
677 677
678 678 def _unregister_engine(self, msg):
679 679 """Unregister an engine that has died."""
680 680 content = msg['content']
681 681 eid = int(content['id'])
682 682 if eid in self._ids:
683 683 self._ids.remove(eid)
684 684 uuid = self._engines.pop(eid)
685 685
686 686 self._handle_stranded_msgs(eid, uuid)
687 687
688 688 if self._task_socket and self._task_scheme == 'pure':
689 689 self._stop_scheduling_tasks()
690 690
691 691 def _handle_stranded_msgs(self, eid, uuid):
692 692 """Handle messages known to be on an engine when the engine unregisters.
693 693
694 694 It is possible that this will fire prematurely - that is, an engine will
695 695 go down after completing a result, and the client will be notified
696 696 of the unregistration and later receive the successful result.
697 697 """
698 698
699 699 outstanding = self._outstanding_dict[uuid]
700 700
701 701 for msg_id in list(outstanding):
702 702 if msg_id in self.results:
703 703 # we already have this result, nothing further to do
704 704 continue
705 705 try:
706 706 raise error.EngineError("Engine %r died while running task %r"%(eid, msg_id))
707 707 except:
708 708 content = error.wrap_exception()
709 709 # build a fake message:
710 710 msg = self.session.msg('apply_reply', content=content)
711 711 msg['parent_header']['msg_id'] = msg_id
712 712 msg['metadata']['engine'] = uuid
713 713 self._handle_apply_reply(msg)
714 714
715 715 def _handle_execute_reply(self, msg):
716 716 """Save the reply to an execute_request into our results.
717 717
718 718 execute messages are never actually used. apply is used instead.
719 719 """
720 720
721 721 parent = msg['parent_header']
722 722 msg_id = parent['msg_id']
723 723 if msg_id not in self.outstanding:
724 724 if msg_id in self.history:
725 725 print("got stale result: %s"%msg_id)
726 726 else:
727 727 print("got unknown result: %s"%msg_id)
728 728 else:
729 729 self.outstanding.remove(msg_id)
730 730
731 731 content = msg['content']
732 732 header = msg['header']
733 733
734 734 # construct metadata:
735 735 md = self.metadata[msg_id]
736 736 md.update(self._extract_metadata(msg))
737 737 # is this redundant?
738 738 self.metadata[msg_id] = md
739 739
740 740 e_outstanding = self._outstanding_dict[md['engine_uuid']]
741 741 if msg_id in e_outstanding:
742 742 e_outstanding.remove(msg_id)
743 743
744 744 # construct result:
745 745 if content['status'] == 'ok':
746 746 self.results[msg_id] = ExecuteReply(msg_id, content, md)
747 747 elif content['status'] == 'aborted':
748 748 self.results[msg_id] = error.TaskAborted(msg_id)
749 749 elif content['status'] == 'resubmitted':
750 750 # TODO: handle resubmission
751 751 pass
752 752 else:
753 753 self.results[msg_id] = self._unwrap_exception(content)
754 754
755 755 def _handle_apply_reply(self, msg):
756 756 """Save the reply to an apply_request into our results."""
757 757 parent = msg['parent_header']
758 758 msg_id = parent['msg_id']
759 759 if msg_id not in self.outstanding:
760 760 if msg_id in self.history:
761 761 print("got stale result: %s"%msg_id)
762 762 print(self.results[msg_id])
763 763 print(msg)
764 764 else:
765 765 print("got unknown result: %s"%msg_id)
766 766 else:
767 767 self.outstanding.remove(msg_id)
768 768 content = msg['content']
769 769 header = msg['header']
770 770
771 771 # construct metadata:
772 772 md = self.metadata[msg_id]
773 773 md.update(self._extract_metadata(msg))
774 774 # is this redundant?
775 775 self.metadata[msg_id] = md
776 776
777 777 e_outstanding = self._outstanding_dict[md['engine_uuid']]
778 778 if msg_id in e_outstanding:
779 779 e_outstanding.remove(msg_id)
780 780
781 781 # construct result:
782 782 if content['status'] == 'ok':
783 783 self.results[msg_id] = serialize.unserialize_object(msg['buffers'])[0]
784 784 elif content['status'] == 'aborted':
785 785 self.results[msg_id] = error.TaskAborted(msg_id)
786 786 elif content['status'] == 'resubmitted':
787 787 # TODO: handle resubmission
788 788 pass
789 789 else:
790 790 self.results[msg_id] = self._unwrap_exception(content)
791 791
792 792 def _flush_notifications(self):
793 793 """Flush notifications of engine registrations waiting
794 794 in ZMQ queue."""
795 795 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
796 796 while msg is not None:
797 797 if self.debug:
798 798 pprint(msg)
799 799 msg_type = msg['header']['msg_type']
800 800 handler = self._notification_handlers.get(msg_type, None)
801 801 if handler is None:
802 802 raise Exception("Unhandled message type: %s" % msg_type)
803 803 else:
804 804 handler(msg)
805 805 idents,msg = self.session.recv(self._notification_socket, mode=zmq.NOBLOCK)
806 806
807 807 def _flush_results(self, sock):
808 808 """Flush task or queue results waiting in ZMQ queue."""
809 809 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
810 810 while msg is not None:
811 811 if self.debug:
812 812 pprint(msg)
813 813 msg_type = msg['header']['msg_type']
814 814 handler = self._queue_handlers.get(msg_type, None)
815 815 if handler is None:
816 816 raise Exception("Unhandled message type: %s" % msg_type)
817 817 else:
818 818 handler(msg)
819 819 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
820 820
821 821 def _flush_control(self, sock):
822 822 """Flush replies from the control channel waiting
823 823 in the ZMQ queue.
824 824
825 825 Currently: ignore them."""
826 826 if self._ignored_control_replies <= 0:
827 827 return
828 828 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
829 829 while msg is not None:
830 830 self._ignored_control_replies -= 1
831 831 if self.debug:
832 832 pprint(msg)
833 833 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
834 834
835 835 def _flush_ignored_control(self):
836 836 """flush ignored control replies"""
837 837 while self._ignored_control_replies > 0:
838 838 self.session.recv(self._control_socket)
839 839 self._ignored_control_replies -= 1
840 840
841 841 def _flush_ignored_hub_replies(self):
842 842 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
843 843 while msg is not None:
844 844 ident,msg = self.session.recv(self._query_socket, mode=zmq.NOBLOCK)
845 845
846 846 def _flush_iopub(self, sock):
847 847 """Flush replies from the iopub channel waiting
848 848 in the ZMQ queue.
849 849 """
850 850 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
851 851 while msg is not None:
852 852 if self.debug:
853 853 pprint(msg)
854 854 parent = msg['parent_header']
855 855 # ignore IOPub messages with no parent.
856 856 # Caused by print statements or warnings from before the first execution.
857 857 if not parent:
858 858 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
859 859 continue
860 860 msg_id = parent['msg_id']
861 861 content = msg['content']
862 862 header = msg['header']
863 863 msg_type = msg['header']['msg_type']
864 864
865 865 # init metadata:
866 866 md = self.metadata[msg_id]
867 867
868 868 if msg_type == 'stream':
869 869 name = content['name']
870 870 s = md[name] or ''
871 871 md[name] = s + content['data']
872 872 elif msg_type == 'error':
873 873 md.update({'error' : self._unwrap_exception(content)})
874 874 elif msg_type == 'execute_input':
875 875 md.update({'execute_input' : content['code']})
876 876 elif msg_type == 'display_data':
877 877 md['outputs'].append(content)
878 878 elif msg_type == 'execute_result':
879 879 md['execute_result'] = content
880 880 elif msg_type == 'data_message':
881 881 data, remainder = serialize.unserialize_object(msg['buffers'])
882 882 md['data'].update(data)
883 883 elif msg_type == 'status':
884 884 # idle message comes after all outputs
885 885 if content['execution_state'] == 'idle':
886 886 md['outputs_ready'] = True
887 887 else:
888 888 # unhandled msg_type
889 889 pass
890 890
891 891 # redundant?
892 892 self.metadata[msg_id] = md
893 893
894 894 idents,msg = self.session.recv(sock, mode=zmq.NOBLOCK)
895 895
896 896 #--------------------------------------------------------------------------
897 897 # len, getitem
898 898 #--------------------------------------------------------------------------
899 899
900 900 def __len__(self):
901 901 """len(client) returns # of engines."""
902 902 return len(self.ids)
903 903
904 904 def __getitem__(self, key):
905 905 """index access returns DirectView multiplexer objects
906 906
907 907 Must be int, slice, or list/tuple/xrange of ints"""
908 908 if not isinstance(key, (int, slice, tuple, list, xrange)):
909 909 raise TypeError("key by int/slice/iterable of ints only, not %s"%(type(key)))
910 910 else:
911 911 return self.direct_view(key)
912 912
913 913 def __iter__(self):
914 914 """Since we define getitem, Client is iterable
915 915
916 916 but without also defining __iter__, it won't work correctly unless engine IDs
917 917 start at zero and are contiguous.
918 918 """
919 919 for eid in self.ids:
920 920 yield self.direct_view(eid)
921 921
922 922 #--------------------------------------------------------------------------
923 923 # Begin public methods
924 924 #--------------------------------------------------------------------------
925 925
926 926 @property
927 927 def ids(self):
928 928 """Always up-to-date ids property."""
929 929 self._flush_notifications()
930 930 # always copy:
931 931 return list(self._ids)
932 932
933 933 def activate(self, targets='all', suffix=''):
934 934 """Create a DirectView and register it with IPython magics
935 935
936 936 Defines the magics `%px, %autopx, %pxresult, %%px`
937 937
938 938 Parameters
939 939 ----------
940 940
941 941 targets: int, list of ints, or 'all'
942 942 The engines on which the view's magics will run
943 943 suffix: str [default: '']
944 944 The suffix, if any, for the magics. This allows you to have
945 945 multiple views associated with parallel magics at the same time.
946 946
947 947 e.g. ``rc.activate(targets=0, suffix='0')`` will give you
948 948 the magics ``%px0``, ``%pxresult0``, etc. for running magics just
949 949 on engine 0.
950 950 """
951 951 view = self.direct_view(targets)
952 952 view.block = True
953 953 view.activate(suffix)
954 954 return view
955 955
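For example (sketch; assumes the connected ``rc`` client from the earlier illustration and at least two registered engines):

```python
view = rc.activate(targets=[0, 1], suffix='01')
# %px01, %%px01 and %pxresult01 now run only on engines 0 and 1, while any
# unsuffixed %px magics keep their existing binding.
view['a'] = 5        # the returned DirectView is blocking (block=True above)
```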
956 956 def close(self, linger=None):
957 957 """Close my zmq Sockets
958 958
959 959 If `linger`, set the zmq LINGER socket option,
960 960 which allows discarding of messages.
961 961 """
962 962 if self._closed:
963 963 return
964 964 self.stop_spin_thread()
965 965 snames = [ trait for trait in self.trait_names() if trait.endswith("socket") ]
966 966 for name in snames:
967 967 socket = getattr(self, name)
968 968 if socket is not None and not socket.closed:
969 969 if linger is not None:
970 970 socket.close(linger=linger)
971 971 else:
972 972 socket.close()
973 973 self._closed = True
974 974
975 975 def _spin_every(self, interval=1):
976 976 """target func for use in spin_thread"""
977 977 while True:
978 978 if self._stop_spinning.is_set():
979 979 return
980 980 time.sleep(interval)
981 981 self.spin()
982 982
983 983 def spin_thread(self, interval=1):
984 984 """call Client.spin() in a background thread on some regular interval
985 985
986 986 This helps ensure that messages don't pile up too much in the zmq queue
987 987 while you are working on other things, or just leaving an idle terminal.
988 988
989 989 It also helps limit potential padding of the `received` timestamp
990 990 on AsyncResult objects, used for timings.
991 991
992 992 Parameters
993 993 ----------
994 994
995 995 interval : float, optional
996 996 The interval on which to spin the client in the background thread
997 997 (simply passed to time.sleep).
998 998
999 999 Notes
1000 1000 -----
1001 1001
1002 1002 For precision timing, you may want to use this method to put a bound
1003 1003 on the jitter (in seconds) in `received` timestamps used
1004 1004 in AsyncResult.wall_time.
1005 1005
1006 1006 """
1007 1007 if self._spin_thread is not None:
1008 1008 self.stop_spin_thread()
1009 1009 self._stop_spinning.clear()
1010 1010 self._spin_thread = Thread(target=self._spin_every, args=(interval,))
1011 1011 self._spin_thread.daemon = True
1012 1012 self._spin_thread.start()
1013 1013
1014 1014 def stop_spin_thread(self):
1015 1015 """stop background spin_thread, if any"""
1016 1016 if self._spin_thread is not None:
1017 1017 self._stop_spinning.set()
1018 1018 self._spin_thread.join()
1019 1019 self._spin_thread = None
1020 1020
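A usage sketch for the pair of methods above (the interval is arbitrary; assumes the connected ``rc`` client from the earlier examples):

```python
import time

rc.spin_thread(interval=0.5)            # flush incoming messages twice a second
ar = rc[:].apply_async(time.sleep, 1)
ar.wait()                               # `received` jitter stays within ~0.5 s
rc.stop_spin_thread()
```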
1021 1021 def spin(self):
1022 1022 """Flush any registration notifications and execution results
1023 1023 waiting in the ZMQ queue.
1024 1024 """
1025 1025 if self._notification_socket:
1026 1026 self._flush_notifications()
1027 1027 if self._iopub_socket:
1028 1028 self._flush_iopub(self._iopub_socket)
1029 1029 if self._mux_socket:
1030 1030 self._flush_results(self._mux_socket)
1031 1031 if self._task_socket:
1032 1032 self._flush_results(self._task_socket)
1033 1033 if self._control_socket:
1034 1034 self._flush_control(self._control_socket)
1035 1035 if self._query_socket:
1036 1036 self._flush_ignored_hub_replies()
1037 1037
1038 1038 def wait(self, jobs=None, timeout=-1):
1039 1039 """waits on one or more `jobs`, for up to `timeout` seconds.
1040 1040
1041 1041 Parameters
1042 1042 ----------
1043 1043
1044 1044 jobs : int, str, or list of ints and/or strs, or one or more AsyncResult objects
1045 1045 ints are indices to self.history
1046 1046 strs are msg_ids
1047 1047 default: wait on all outstanding messages
1048 1048 timeout : float
1049 1049 a time in seconds, after which to give up.
1050 1050 default is -1, which means no timeout
1051 1051
1052 1052 Returns
1053 1053 -------
1054 1054
1055 1055 True : when all msg_ids are done
1056 1056 False : timeout reached, some msg_ids still outstanding
1057 1057 """
1058 1058 tic = time.time()
1059 1059 if jobs is None:
1060 1060 theids = self.outstanding
1061 1061 else:
1062 1062 if isinstance(jobs, string_types + (int, AsyncResult)):
1063 1063 jobs = [jobs]
1064 1064 theids = set()
1065 1065 for job in jobs:
1066 1066 if isinstance(job, int):
1067 1067 # index access
1068 1068 job = self.history[job]
1069 1069 elif isinstance(job, AsyncResult):
1070 1070 theids.update(job.msg_ids)
1071 1071 continue
1072 1072 theids.add(job)
1073 1073 if not theids.intersection(self.outstanding):
1074 1074 return True
1075 1075 self.spin()
1076 1076 while theids.intersection(self.outstanding):
1077 1077 if timeout >= 0 and ( time.time()-tic ) > timeout:
1078 1078 break
1079 1079 time.sleep(1e-3)
1080 1080 self.spin()
1081 1081 return len(theids.intersection(self.outstanding)) == 0
1082 1082
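For instance (sketch, reusing the ``rc`` client from the earlier examples):

```python
ar = rc[:].apply_async(pow, 2, 10)
if rc.wait(ar, timeout=5):      # True: everything finished within 5 seconds
    print(ar.get())             # safe to fetch, all results have arrived
```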
1083 1083 #--------------------------------------------------------------------------
1084 1084 # Control methods
1085 1085 #--------------------------------------------------------------------------
1086 1086
1087 1087 @spin_first
1088 1088 def clear(self, targets=None, block=None):
1089 1089 """Clear the namespace in target(s)."""
1090 1090 block = self.block if block is None else block
1091 1091 targets = self._build_targets(targets)[0]
1092 1092 for t in targets:
1093 1093 self.session.send(self._control_socket, 'clear_request', content={}, ident=t)
1094 1094 error = False
1095 1095 if block:
1096 1096 self._flush_ignored_control()
1097 1097 for i in range(len(targets)):
1098 1098 idents,msg = self.session.recv(self._control_socket,0)
1099 1099 if self.debug:
1100 1100 pprint(msg)
1101 1101 if msg['content']['status'] != 'ok':
1102 1102 error = self._unwrap_exception(msg['content'])
1103 1103 else:
1104 1104 self._ignored_control_replies += len(targets)
1105 1105 if error:
1106 1106 raise error
1107 1107
1108 1108
1109 1109 @spin_first
1110 1110 def abort(self, jobs=None, targets=None, block=None):
1111 1111 """Abort specific jobs from the execution queues of target(s).
1112 1112
1113 1113 This is a mechanism to prevent jobs that have already been submitted
1114 1114 from executing.
1115 1115
1116 1116 Parameters
1117 1117 ----------
1118 1118
1119 1119 jobs : msg_id, list of msg_ids, or AsyncResult
1120 1120 The jobs to be aborted
1121 1121
1122 1122 If unspecified/None: abort all outstanding jobs.
1123 1123
1124 1124 """
1125 1125 block = self.block if block is None else block
1126 1126 jobs = jobs if jobs is not None else list(self.outstanding)
1127 1127 targets = self._build_targets(targets)[0]
1128 1128
1129 1129 msg_ids = []
1130 1130 if isinstance(jobs, string_types + (AsyncResult,)):
1131 1131 jobs = [jobs]
1132 1132 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1133 1133 if bad_ids:
1134 1134 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1135 1135 for j in jobs:
1136 1136 if isinstance(j, AsyncResult):
1137 1137 msg_ids.extend(j.msg_ids)
1138 1138 else:
1139 1139 msg_ids.append(j)
1140 1140 content = dict(msg_ids=msg_ids)
1141 1141 for t in targets:
1142 1142 self.session.send(self._control_socket, 'abort_request',
1143 1143 content=content, ident=t)
1144 1144 error = False
1145 1145 if block:
1146 1146 self._flush_ignored_control()
1147 1147 for i in range(len(targets)):
1148 1148 idents,msg = self.session.recv(self._control_socket,0)
1149 1149 if self.debug:
1150 1150 pprint(msg)
1151 1151 if msg['content']['status'] != 'ok':
1152 1152 error = self._unwrap_exception(msg['content'])
1153 1153 else:
1154 1154 self._ignored_control_replies += len(targets)
1155 1155 if error:
1156 1156 raise error
1157 1157
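Per the docstring, ``abort`` accepts a single msg_id, a list of msg_ids, or an AsyncResult, and with ``jobs=None`` it aborts everything still outstanding. An illustrative sketch (again assuming the ``rc`` client from earlier):

```python
import time

ar = rc[:].apply_async(time.sleep, 10)
rc.abort(ar)              # abort by AsyncResult (or a msg_id / list of msg_ids)
rc.abort(targets=0)       # jobs=None: abort all outstanding jobs on engine 0
```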
1158 1158 @spin_first
1159 1159 def shutdown(self, targets='all', restart=False, hub=False, block=None):
1160 1160 """Terminates one or more engine processes, optionally including the hub.
1161 1161
1162 1162 Parameters
1163 1163 ----------
1164 1164
1165 1165 targets: list of ints or 'all' [default: all]
1166 1166 Which engines to shutdown.
1167 1167 hub: bool [default: False]
1168 1168 Whether to include the Hub. hub=True implies targets='all'.
1169 1169 block: bool [default: self.block]
1170 1170 Whether to wait for clean shutdown replies or not.
1171 1171 restart: bool [default: False]
1172 1172 NOT IMPLEMENTED
1173 1173 whether to restart engines after shutting them down.
1174 1174 """
1175 1175 from IPython.parallel.error import NoEnginesRegistered
1176 1176 if restart:
1177 1177 raise NotImplementedError("Engine restart is not yet implemented")
1178 1178
1179 1179 block = self.block if block is None else block
1180 1180 if hub:
1181 1181 targets = 'all'
1182 1182 try:
1183 1183 targets = self._build_targets(targets)[0]
1184 1184 except NoEnginesRegistered:
1185 1185 targets = []
1186 1186 for t in targets:
1187 1187 self.session.send(self._control_socket, 'shutdown_request',
1188 1188 content={'restart':restart},ident=t)
1189 1189 error = False
1190 1190 if block or hub:
1191 1191 self._flush_ignored_control()
1192 1192 for i in range(len(targets)):
1193 1193 idents,msg = self.session.recv(self._control_socket, 0)
1194 1194 if self.debug:
1195 1195 pprint(msg)
1196 1196 if msg['content']['status'] != 'ok':
1197 1197 error = self._unwrap_exception(msg['content'])
1198 1198 else:
1199 1199 self._ignored_control_replies += len(targets)
1200 1200
1201 1201 if hub:
1202 1202 time.sleep(0.25)
1203 1203 self.session.send(self._query_socket, 'shutdown_request')
1204 1204 idents,msg = self.session.recv(self._query_socket, 0)
1205 1205 if self.debug:
1206 1206 pprint(msg)
1207 1207 if msg['content']['status'] != 'ok':
1208 1208 error = self._unwrap_exception(msg['content'])
1209 1209
1210 1210 if error:
1211 1211 raise error
1212 1212
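A minimal sketch of tearing engines down (assuming a running cluster; passing ``hub=True`` also stops the controller, after which this Client can no longer be used)::

    from IPython.parallel import Client

    rc = Client()
    rc.shutdown(targets=[0, 1], block=True)   # stop engines 0 and 1 only
    # rc.shutdown(hub=True)                   # stop every engine and the Hub itself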
1213 1213 #--------------------------------------------------------------------------
1214 1214 # Execution related methods
1215 1215 #--------------------------------------------------------------------------
1216 1216
1217 1217 def _maybe_raise(self, result):
1218 1218 """wrapper for maybe raising an exception if apply failed."""
1219 1219 if isinstance(result, error.RemoteError):
1220 1220 raise result
1221 1221
1222 1222 return result
1223 1223
1224 1224 def send_apply_request(self, socket, f, args=None, kwargs=None, metadata=None, track=False,
1225 1225 ident=None):
1226 1226 """construct and send an apply message via a socket.
1227 1227
1228 1228 This is the principal method with which all engine execution is performed by views.
1229 1229 """
1230 1230
1231 1231 if self._closed:
1232 1232 raise RuntimeError("Client cannot be used after its sockets have been closed")
1233 1233
1234 1234 # defaults:
1235 1235 args = args if args is not None else []
1236 1236 kwargs = kwargs if kwargs is not None else {}
1237 1237 metadata = metadata if metadata is not None else {}
1238 1238
1239 1239 # validate arguments
1240 1240 if not callable(f) and not isinstance(f, Reference):
1241 1241 raise TypeError("f must be callable, not %s"%type(f))
1242 1242 if not isinstance(args, (tuple, list)):
1243 1243 raise TypeError("args must be tuple or list, not %s"%type(args))
1244 1244 if not isinstance(kwargs, dict):
1245 1245 raise TypeError("kwargs must be dict, not %s"%type(kwargs))
1246 1246 if not isinstance(metadata, dict):
1247 1247 raise TypeError("metadata must be dict, not %s"%type(metadata))
1248 1248
1249 1249 bufs = serialize.pack_apply_message(f, args, kwargs,
1250 1250 buffer_threshold=self.session.buffer_threshold,
1251 1251 item_threshold=self.session.item_threshold,
1252 1252 )
1253 1253
1254 1254 msg = self.session.send(socket, "apply_request", buffers=bufs, ident=ident,
1255 1255 metadata=metadata, track=track)
1256 1256
1257 1257 msg_id = msg['header']['msg_id']
1258 1258 self.outstanding.add(msg_id)
1259 1259 if ident:
1260 1260 # possibly routed to a specific engine
1261 1261 if isinstance(ident, list):
1262 1262 ident = ident[-1]
1263 1263 if ident in self._engines.values():
1264 1264 # save for later, in case of engine death
1265 1265 self._outstanding_dict[ident].add(msg_id)
1266 1266 self.history.append(msg_id)
1267 1267 self.metadata[msg_id]['submitted'] = datetime.now()
1268 1268
1269 1269 return msg
1270 1270
1271 1271 def send_execute_request(self, socket, code, silent=True, metadata=None, ident=None):
1272 1272 """construct and send an execute request via a socket.
1273 1273
1274 1274 """
1275 1275
1276 1276 if self._closed:
1277 1277 raise RuntimeError("Client cannot be used after its sockets have been closed")
1278 1278
1279 1279 # defaults:
1280 1280 metadata = metadata if metadata is not None else {}
1281 1281
1282 1282 # validate arguments
1283 1283 if not isinstance(code, string_types):
1284 1284 raise TypeError("code must be text, not %s" % type(code))
1285 1285 if not isinstance(metadata, dict):
1286 1286 raise TypeError("metadata must be dict, not %s" % type(metadata))
1287 1287
1288 1288 content = dict(code=code, silent=bool(silent), user_expressions={})
1289 1289
1290 1290
1291 1291 msg = self.session.send(socket, "execute_request", content=content, ident=ident,
1292 1292 metadata=metadata)
1293 1293
1294 1294 msg_id = msg['header']['msg_id']
1295 1295 self.outstanding.add(msg_id)
1296 1296 if ident:
1297 1297 # possibly routed to a specific engine
1298 1298 if isinstance(ident, list):
1299 1299 ident = ident[-1]
1300 1300 if ident in self._engines.values():
1301 1301 # save for later, in case of engine death
1302 1302 self._outstanding_dict[ident].add(msg_id)
1303 1303 self.history.append(msg_id)
1304 1304 self.metadata[msg_id]['submitted'] = datetime.now()
1305 1305
1306 1306 return msg
1307 1307
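These two low-level senders are normally reached through views rather than called directly; a sketch of the equivalent high-level calls (assuming a running cluster)::

    from IPython.parallel import Client

    rc = Client()
    dv = rc[:]                              # DirectView over all current engines
    ar_apply = dv.apply_async(pow, 2, 10)   # routed through send_apply_request
    ar_exec = dv.execute("y = 2 ** 10")     # routed through send_execute_request
    ar_apply.get(), ar_exec.get()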
1308 1308 #--------------------------------------------------------------------------
1309 1309 # construct a View object
1310 1310 #--------------------------------------------------------------------------
1311 1311
1312 1312 def load_balanced_view(self, targets=None):
1313 1313 """construct a LoadBalancedView object.
1314 1314
1315 1315 If no arguments are specified, create a LoadBalancedView
1316 1316 using all engines.
1317 1317
1318 1318 Parameters
1319 1319 ----------
1320 1320
1321 1321 targets: list,slice,int,etc. [default: use all engines]
1322 1322 The subset of engines across which to load-balance
1323 1323 """
1324 1324 if targets == 'all':
1325 1325 targets = None
1326 1326 if targets is not None:
1327 1327 targets = self._build_targets(targets)[1]
1328 1328 return LoadBalancedView(client=self, socket=self._task_socket, targets=targets)
1329 1329
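For example, a dynamically scheduled map over whichever engines happen to be free (a sketch, assuming a running cluster)::

    from IPython.parallel import Client

    def square(x):
        return x * x

    rc = Client()
    lview = rc.load_balanced_view()        # tasks go to whichever engine is idle
    ar = lview.map_async(square, range(32))
    results = ar.get()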
1330 1330 def direct_view(self, targets='all'):
1331 1331 """construct a DirectView object.
1332 1332
1333 1333 If no targets are specified, create a DirectView using all engines.
1334 1334
1335 1335 rc.direct_view('all') is distinguished from rc[:] in that 'all' will
1336 1336 evaluate the target engines at each execution, whereas rc[:] will connect to
1337 1337 all *current* engines, and that list will not change.
1338 1338
1339 1339 That is, 'all' will always use all engines, whereas rc[:] will not use
1340 1340 engines added after the DirectView is constructed.
1341 1341
1342 1342 Parameters
1343 1343 ----------
1344 1344
1345 1345 targets: list,slice,int,etc. [default: use all engines]
1346 1346 The engines to use for the View
1347 1347 """
1348 1348 single = isinstance(targets, int)
1349 1349 # allow 'all' to be lazily evaluated at each execution
1350 1350 if targets != 'all':
1351 1351 targets = self._build_targets(targets)[1]
1352 1352 if single:
1353 1353 targets = targets[0]
1354 1354 return DirectView(client=self, socket=self._mux_socket, targets=targets)
1355 1355
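A sketch contrasting the lazily evaluated ``'all'`` view with a snapshot of the engines registered right now::

    from IPython.parallel import Client

    rc = Client()
    dv_all = rc.direct_view('all')    # resolves the engine list at every call
    dv_now = rc[:]                    # pinned to the engines registered right now
    dv_all['a'] = 1                   # push a=1 to every engine (dict-style)
    dv_now.execute('b = a + 1', block=True)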
1356 1356 #--------------------------------------------------------------------------
1357 1357 # Query methods
1358 1358 #--------------------------------------------------------------------------
1359 1359
1360 1360 @spin_first
1361 1361 def get_result(self, indices_or_msg_ids=None, block=None):
1362 1362 """Retrieve a result by msg_id or history index, wrapped in an AsyncResult object.
1363 1363
1364 1364 If the client already has the results, no request to the Hub will be made.
1365 1365
1366 1366 This is a convenient way to construct AsyncResult objects, which are wrappers
1367 1367 that include metadata about execution, and allow for awaiting results that
1368 1368 were not submitted by this Client.
1369 1369
1370 1370 It can also be a convenient way to retrieve the metadata associated with
1371 1371 blocking execution, since it always returns an AsyncResult, which carries that metadata.
1372 1372
1373 1373 Examples
1374 1374 --------
1375 1375 ::
1376 1376
1377 1377 In [10]: ar = client.get_result(msg_id)
1378 1378
1379 1379 Parameters
1380 1380 ----------
1381 1381
1382 1382 indices_or_msg_ids : integer history index, str msg_id, or list of either
1383 1383 The history indices or msg_ids of the results to be retrieved
1384 1384
1385 1385 block : bool
1386 1386 Whether to wait for the result to be done
1387 1387
1388 1388 Returns
1389 1389 -------
1390 1390
1391 1391 AsyncResult
1392 1392 A single AsyncResult object will always be returned.
1393 1393
1394 1394 AsyncHubResult
1395 1395 A subclass of AsyncResult that retrieves results from the Hub
1396 1396
1397 1397 """
1398 1398 block = self.block if block is None else block
1399 1399 if indices_or_msg_ids is None:
1400 1400 indices_or_msg_ids = -1
1401 1401
1402 1402 single_result = False
1403 1403 if not isinstance(indices_or_msg_ids, (list,tuple)):
1404 1404 indices_or_msg_ids = [indices_or_msg_ids]
1405 1405 single_result = True
1406 1406
1407 1407 theids = []
1408 1408 for id in indices_or_msg_ids:
1409 1409 if isinstance(id, int):
1410 1410 id = self.history[id]
1411 1411 if not isinstance(id, string_types):
1412 1412 raise TypeError("indices must be str or int, not %r"%id)
1413 1413 theids.append(id)
1414 1414
1415 1415 local_ids = [msg_id for msg_id in theids if (msg_id in self.outstanding or msg_id in self.results)]
1416 1416 remote_ids = [msg_id for msg_id in theids if msg_id not in local_ids]
1417 1417
1418 1418 # given a single msg_id initially, get_result should return the result itself,
1419 1419 # not a length-one list
1420 1420 if single_result:
1421 1421 theids = theids[0]
1422 1422
1423 1423 if remote_ids:
1424 1424 ar = AsyncHubResult(self, msg_ids=theids)
1425 1425 else:
1426 1426 ar = AsyncResult(self, msg_ids=theids)
1427 1427
1428 1428 if block:
1429 1429 ar.wait()
1430 1430
1431 1431 return ar
1432 1432
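A sketch of looking a result up again by msg_id, which also works for tasks submitted by other clients as long as the Hub still has the record::

    from IPython.parallel import Client

    rc = Client()
    view = rc.load_balanced_view()
    ar = view.apply_async(pow, 2, 10)
    msg_id = ar.msg_ids[0]

    later = rc.get_result(msg_id)     # AsyncResult, or AsyncHubResult if not cached locally
    later.wait()
    print(later.get(), later.metadata)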
1433 1433 @spin_first
1434 1434 def resubmit(self, indices_or_msg_ids=None, metadata=None, block=None):
1435 1435 """Resubmit one or more tasks.
1436 1436
1437 1437 In-flight tasks may not be resubmitted.
1438 1438
1439 1439 Parameters
1440 1440 ----------
1441 1441
1442 1442 indices_or_msg_ids : integer history index, str msg_id, or list of either
1443 1443 The history indices or msg_ids of the tasks to be resubmitted
1444 1444
1445 1445 block : bool
1446 1446 Whether to wait for the result to be done
1447 1447
1448 1448 Returns
1449 1449 -------
1450 1450
1451 1451 AsyncHubResult
1452 1452 A subclass of AsyncResult that retrieves results from the Hub
1453 1453
1454 1454 """
1455 1455 block = self.block if block is None else block
1456 1456 if indices_or_msg_ids is None:
1457 1457 indices_or_msg_ids = -1
1458 1458
1459 1459 if not isinstance(indices_or_msg_ids, (list,tuple)):
1460 1460 indices_or_msg_ids = [indices_or_msg_ids]
1461 1461
1462 1462 theids = []
1463 1463 for id in indices_or_msg_ids:
1464 1464 if isinstance(id, int):
1465 1465 id = self.history[id]
1466 1466 if not isinstance(id, string_types):
1467 1467 raise TypeError("indices must be str or int, not %r"%id)
1468 1468 theids.append(id)
1469 1469
1470 1470 content = dict(msg_ids = theids)
1471 1471
1472 1472 self.session.send(self._query_socket, 'resubmit_request', content)
1473 1473
1474 1474 zmq.select([self._query_socket], [], [])
1475 1475 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1476 1476 if self.debug:
1477 1477 pprint(msg)
1478 1478 content = msg['content']
1479 1479 if content['status'] != 'ok':
1480 1480 raise self._unwrap_exception(content)
1481 1481 mapping = content['resubmitted']
1482 1482 new_ids = [ mapping[msg_id] for msg_id in theids ]
1483 1483
1484 1484 ar = AsyncHubResult(self, msg_ids=new_ids)
1485 1485
1486 1486 if block:
1487 1487 ar.wait()
1488 1488
1489 1489 return ar
1490 1490
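Resubmission in practice, as a sketch: wait for a task to finish, then run an identical copy of it::

    from IPython.parallel import Client

    rc = Client()
    view = rc.load_balanced_view()
    ar = view.apply_async(pow, 2, 10)
    ar.get()                                   # the original must not be in flight
    ar2 = rc.resubmit(ar.msg_ids, block=True)  # new msg_ids; results come via the Hub
    print(ar2.get())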
1491 1491 @spin_first
1492 1492 def result_status(self, msg_ids, status_only=True):
1493 1493 """Check on the status of the result(s) of the apply request with `msg_ids`.
1494 1494
1495 1495 If status_only is False, then the actual results will be retrieved, else
1496 1496 only the status of the results will be checked.
1497 1497
1498 1498 Parameters
1499 1499 ----------
1500 1500
1501 1501 msg_ids : list of msg_ids
1502 1502 if int:
1503 1503 Passed as index to self.history for convenience.
1504 1504 status_only : bool (default: True)
1505 1505 if False:
1506 1506 Retrieve the actual results of completed tasks.
1507 1507
1508 1508 Returns
1509 1509 -------
1510 1510
1511 1511 results : dict
1512 1512 There will always be the keys 'pending' and 'completed', which will
1513 1513 be lists of msg_ids that are incomplete or complete. If `status_only`
1514 1514 is False, then completed results will be keyed by their `msg_id`.
1515 1515 """
1516 1516 if not isinstance(msg_ids, (list,tuple)):
1517 1517 msg_ids = [msg_ids]
1518 1518
1519 1519 theids = []
1520 1520 for msg_id in msg_ids:
1521 1521 if isinstance(msg_id, int):
1522 1522 msg_id = self.history[msg_id]
1523 1523 if not isinstance(msg_id, string_types):
1524 1524 raise TypeError("msg_ids must be str, not %r"%msg_id)
1525 1525 theids.append(msg_id)
1526 1526
1527 1527 completed = []
1528 1528 local_results = {}
1529 1529
1530 1530 # comment this block out to temporarily disable local shortcut:
1531 1531 for msg_id in theids:
1532 1532 if msg_id in self.results:
1533 1533 completed.append(msg_id)
1534 1534 local_results[msg_id] = self.results[msg_id]
1535 1535 theids.remove(msg_id)
1536 1536
1537 1537 if theids: # some not locally cached
1538 1538 content = dict(msg_ids=theids, status_only=status_only)
1539 1539 msg = self.session.send(self._query_socket, "result_request", content=content)
1540 1540 zmq.select([self._query_socket], [], [])
1541 1541 idents,msg = self.session.recv(self._query_socket, zmq.NOBLOCK)
1542 1542 if self.debug:
1543 1543 pprint(msg)
1544 1544 content = msg['content']
1545 1545 if content['status'] != 'ok':
1546 1546 raise self._unwrap_exception(content)
1547 1547 buffers = msg['buffers']
1548 1548 else:
1549 1549 content = dict(completed=[],pending=[])
1550 1550
1551 1551 content['completed'].extend(completed)
1552 1552
1553 1553 if status_only:
1554 1554 return content
1555 1555
1556 1556 failures = []
1557 1557 # load cached results into result:
1558 1558 content.update(local_results)
1559 1559
1560 1560 # update cache with results:
1561 1561 for msg_id in sorted(theids):
1562 1562 if msg_id in content['completed']:
1563 1563 rec = content[msg_id]
1564 1564 parent = extract_dates(rec['header'])
1565 1565 header = extract_dates(rec['result_header'])
1566 1566 rcontent = rec['result_content']
1567 1567 iodict = rec['io']
1568 1568 if isinstance(rcontent, str):
1569 1569 rcontent = self.session.unpack(rcontent)
1570 1570
1571 1571 md = self.metadata[msg_id]
1572 1572 md_msg = dict(
1573 1573 content=rcontent,
1574 1574 parent_header=parent,
1575 1575 header=header,
1576 1576 metadata=rec['result_metadata'],
1577 1577 )
1578 1578 md.update(self._extract_metadata(md_msg))
1579 1579 if rec.get('received'):
1580 1580 md['received'] = parse_date(rec['received'])
1581 1581 md.update(iodict)
1582 1582
1583 1583 if rcontent['status'] == 'ok':
1584 1584 if header['msg_type'] == 'apply_reply':
1585 1585 res,buffers = serialize.unserialize_object(buffers)
1586 1586 elif header['msg_type'] == 'execute_reply':
1587 1587 res = ExecuteReply(msg_id, rcontent, md)
1588 1588 else:
1589 1589 raise KeyError("unhandled msg type: %r" % header['msg_type'])
1590 1590 else:
1591 1591 res = self._unwrap_exception(rcontent)
1592 1592 failures.append(res)
1593 1593
1594 1594 self.results[msg_id] = res
1595 1595 content[msg_id] = res
1596 1596
1597 1597 if len(theids) == 1 and failures:
1598 1598 raise failures[0]
1599 1599
1600 1600 error.collect_exceptions(failures, "result_status")
1601 1601 return content
1602 1602
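A sketch of polling for status without pulling results back::

    from IPython.parallel import Client

    rc = Client()
    view = rc.load_balanced_view()
    ar = view.map_async(pow, range(8), range(8))
    status = rc.result_status(ar.msg_ids, status_only=True)
    print(status['pending'], status['completed'])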
1603 1603 @spin_first
1604 1604 def queue_status(self, targets='all', verbose=False):
1605 1605 """Fetch the status of engine queues.
1606 1606
1607 1607 Parameters
1608 1608 ----------
1609 1609
1610 1610 targets : int/str/list of ints/strs
1611 1611 the engines whose states are to be queried.
1612 1612 default : all
1613 1613 verbose : bool
1614 1614 Whether to return lengths only, or lists of ids for each element
1615 1615 """
1616 1616 if targets == 'all':
1617 1617 # allow 'all' to be evaluated on the engine
1618 1618 engine_ids = None
1619 1619 else:
1620 1620 engine_ids = self._build_targets(targets)[1]
1621 1621 content = dict(targets=engine_ids, verbose=verbose)
1622 1622 self.session.send(self._query_socket, "queue_request", content=content)
1623 1623 idents,msg = self.session.recv(self._query_socket, 0)
1624 1624 if self.debug:
1625 1625 pprint(msg)
1626 1626 content = msg['content']
1627 1627 status = content.pop('status')
1628 1628 if status != 'ok':
1629 1629 raise self._unwrap_exception(content)
1630 1630 content = rekey(content)
1631 1631 if isinstance(targets, int):
1632 1632 return content[targets]
1633 1633 else:
1634 1634 return content
1635 1635
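For example (a sketch; the exact keys come from the Hub's queue reply)::

    from IPython.parallel import Client

    rc = Client()
    qs = rc.queue_status()     # per-engine counts, roughly
                               # {0: {'queue': 0, 'completed': 4, 'tasks': 0}, ..., 'unassigned': 0}
    print(qs)
    print(rc.queue_status(targets=0, verbose=True))   # lists of msg_ids for engine 0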
1636 1636 def _build_msgids_from_target(self, targets=None):
1637 1637 """Build a list of msg_ids from the list of engine targets"""
1638 1638 if not targets: # needed as _build_targets otherwise uses all engines
1639 1639 return []
1640 1640 target_ids = self._build_targets(targets)[0]
1641 1641 return [md_id for md_id in self.metadata if self.metadata[md_id]["engine_uuid"] in target_ids]
1642 1642
1643 1643 def _build_msgids_from_jobs(self, jobs=None):
1644 1644 """Build a list of msg_ids from "jobs" """
1645 1645 if not jobs:
1646 1646 return []
1647 1647 msg_ids = []
1648 1648 if isinstance(jobs, string_types + (AsyncResult,)):
1649 1649 jobs = [jobs]
1650 1650 bad_ids = [obj for obj in jobs if not isinstance(obj, string_types + (AsyncResult,))]
1651 1651 if bad_ids:
1652 1652 raise TypeError("Invalid msg_id type %r, expected str or AsyncResult"%bad_ids[0])
1653 1653 for j in jobs:
1654 1654 if isinstance(j, AsyncResult):
1655 1655 msg_ids.extend(j.msg_ids)
1656 1656 else:
1657 1657 msg_ids.append(j)
1658 1658 return msg_ids
1659 1659
1660 1660 def purge_local_results(self, jobs=[], targets=[]):
1661 1661 """Clears the client caches of results and their metadata.
1662 1662
1663 1663 Individual results can be purged by msg_id, or the entire
1664 1664 history of specific targets can be purged.
1665 1665
1666 1666 Use `purge_local_results('all')` to scrub everything from the Client's
1667 1667 results and metadata caches.
1668 1668
1669 1669 After this call all `AsyncResults` are invalid and should be discarded.
1670 1670
1671 1671 If you must "reget" the results, you can still do so by using
1672 1672 `client.get_result(msg_id)` or `client.get_result(asyncresult)`. This will
1673 1673 redownload the results from the hub if they are still available
1674 1674 (i.e. `client.purge_hub_results(...)` has not been called).
1675 1675
1676 1676 Parameters
1677 1677 ----------
1678 1678
1679 1679 jobs : str or list of str or AsyncResult objects
1680 1680 the msg_ids whose results should be purged.
1681 1681 targets : int/list of ints
1682 1682 The engines, by integer ID, whose entire result histories are to be purged.
1683 1683
1684 1684 Raises
1685 1685 ------
1686 1686
1687 1687 RuntimeError : if any of the tasks to be purged are still outstanding.
1688 1688
1689 1689 """
1690 1690 if not targets and not jobs:
1691 1691 raise ValueError("Must specify at least one of `targets` and `jobs`")
1692 1692
1693 1693 if jobs == 'all':
1694 1694 if self.outstanding:
1695 1695 raise RuntimeError("Can't purge outstanding tasks: %s" % self.outstanding)
1696 1696 self.results.clear()
1697 1697 self.metadata.clear()
1698 1698 else:
1699 1699 msg_ids = set()
1700 1700 msg_ids.update(self._build_msgids_from_target(targets))
1701 1701 msg_ids.update(self._build_msgids_from_jobs(jobs))
1702 1702 still_outstanding = self.outstanding.intersection(msg_ids)
1703 1703 if still_outstanding:
1704 1704 raise RuntimeError("Can't purge outstanding tasks: %s" % still_outstanding)
1705 1705 for mid in msg_ids:
1706 1706 self.results.pop(mid)
1707 1707 self.metadata.pop(mid)
1708 1708
1709 1709
1710 1710 @spin_first
1711 1711 def purge_hub_results(self, jobs=[], targets=[]):
1712 1712 """Tell the Hub to forget results.
1713 1713
1714 1714 Individual results can be purged by msg_id, or the entire
1715 1715 history of specific targets can be purged.
1716 1716
1717 1717 Use `purge_results('all')` to scrub everything from the Hub's db.
1718 1718
1719 1719 Parameters
1720 1720 ----------
1721 1721
1722 1722 jobs : str or list of str or AsyncResult objects
1723 1723 the msg_ids whose results should be forgotten.
1724 1724 targets : int/str/list of ints/strs
1725 1725 The targets, by int_id, whose entire history is to be purged.
1726 1726
1727 1727 default : None
1728 1728 """
1729 1729 if not targets and not jobs:
1730 1730 raise ValueError("Must specify at least one of `targets` and `jobs`")
1731 1731 if targets:
1732 1732 targets = self._build_targets(targets)[1]
1733 1733
1734 1734 # construct msg_ids from jobs
1735 1735 if jobs == 'all':
1736 1736 msg_ids = jobs
1737 1737 else:
1738 1738 msg_ids = self._build_msgids_from_jobs(jobs)
1739 1739
1740 1740 content = dict(engine_ids=targets, msg_ids=msg_ids)
1741 1741 self.session.send(self._query_socket, "purge_request", content=content)
1742 1742 idents, msg = self.session.recv(self._query_socket, 0)
1743 1743 if self.debug:
1744 1744 pprint(msg)
1745 1745 content = msg['content']
1746 1746 if content['status'] != 'ok':
1747 1747 raise self._unwrap_exception(content)
1748 1748
1749 1749 def purge_results(self, jobs=[], targets=[]):
1750 1750 """Clears the cached results from both the hub and the local client
1751 1751
1752 1752 Individual results can be purged by msg_id, or the entire
1753 1753 history of specific targets can be purged.
1754 1754
1755 1755 Use `purge_results('all')` to scrub every cached result from both the Hub's and
1756 1756 the Client's db.
1757 1757
1758 1758 Equivalent to calling both `purge_hub_results()` and `purge_local_results()` with
1759 1759 the same arguments.
1760 1760
1761 1761 Parameters
1762 1762 ----------
1763 1763
1764 1764 jobs : str or list of str or AsyncResult objects
1765 1765 the msg_ids whose results should be forgotten.
1766 1766 targets : int/str/list of ints/strs
1767 1767 The targets, by int_id, whose entire history is to be purged.
1768 1768
1769 1769 default : None
1770 1770 """
1771 1771 self.purge_local_results(jobs=jobs, targets=targets)
1772 1772 self.purge_hub_results(jobs=jobs, targets=targets)
1773 1773
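Typical cleanup at the end of a long session, as a sketch (nothing may still be outstanding)::

    from IPython.parallel import Client

    rc = Client()
    rc.wait()                    # let any outstanding tasks finish first
    rc.purge_results('all')      # forget cached results on the Hub and this client
    # more selective variants:
    # rc.purge_results(jobs=ar)            # only the tasks behind one AsyncResult
    # rc.purge_results(targets=[0, 1])     # everything that ran on engines 0 and 1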
1774 1774 def purge_everything(self):
1775 1775 Clears all content from previous tasks, on both the Hub and the local client
1776 1776
1777 1777 In addition to calling `purge_results("all")` it also deletes the history and
1778 1778 other bookkeeping lists.
1779 1779 """
1780 1780 self.purge_results("all")
1781 1781 self.history = []
1782 1782 self.session.digest_history.clear()
1783 1783
1784 1784 @spin_first
1785 1785 def hub_history(self):
1786 1786 """Get the Hub's history
1787 1787
1788 1788 Just like the Client, the Hub has a history, which is a list of msg_ids.
1789 1789 This will contain the history of all clients, and, depending on configuration,
1790 1790 may contain history across multiple cluster sessions.
1791 1791
1792 1792 Any msg_id returned here is a valid argument to `get_result`.
1793 1793
1794 1794 Returns
1795 1795 -------
1796 1796
1797 1797 msg_ids : list of strs
1798 1798 list of all msg_ids, ordered by task submission time.
1799 1799 """
1800 1800
1801 1801 self.session.send(self._query_socket, "history_request", content={})
1802 1802 idents, msg = self.session.recv(self._query_socket, 0)
1803 1803
1804 1804 if self.debug:
1805 1805 pprint(msg)
1806 1806 content = msg['content']
1807 1807 if content['status'] != 'ok':
1808 1808 raise self._unwrap_exception(content)
1809 1809 else:
1810 1810 return content['history']
1811 1811
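Combined with `get_result`, this lets one client inspect work submitted by another, e.g. (a sketch)::

    from IPython.parallel import Client

    rc = Client()
    msg_ids = rc.hub_history()           # every msg_id the Hub knows about
    if msg_ids:
        last = rc.get_result(msg_ids[-1])
        last.wait()
        print(last.metadata['submitted'])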
1812 1812 @spin_first
1813 1813 def db_query(self, query, keys=None):
1814 1814 """Query the Hub's TaskRecord database
1815 1815
1816 1816 This will return a list of task record dicts that match `query`
1817 1817
1818 1818 Parameters
1819 1819 ----------
1820 1820
1821 1821 query : mongodb query dict
1822 1822 The search dict. See mongodb query docs for details.
1823 1823 keys : list of strs [optional]
1824 1824 The subset of keys to be returned. The default is to fetch everything but buffers.
1825 1825 'msg_id' will *always* be included.
1826 1826 """
1827 1827 if isinstance(keys, string_types):
1828 1828 keys = [keys]
1829 1829 content = dict(query=query, keys=keys)
1830 1830 self.session.send(self._query_socket, "db_request", content=content)
1831 1831 idents, msg = self.session.recv(self._query_socket, 0)
1832 1832 if self.debug:
1833 1833 pprint(msg)
1834 1834 content = msg['content']
1835 1835 if content['status'] != 'ok':
1836 1836 raise self._unwrap_exception(content)
1837 1837
1838 1838 records = content['records']
1839 1839
1840 1840 buffer_lens = content['buffer_lens']
1841 1841 result_buffer_lens = content['result_buffer_lens']
1842 1842 buffers = msg['buffers']
1843 1843 has_bufs = buffer_lens is not None
1844 1844 has_rbufs = result_buffer_lens is not None
1845 1845 for i,rec in enumerate(records):
1846 1846 # unpack datetime objects
1847 1847 for hkey in ('header', 'result_header'):
1848 1848 if hkey in rec:
1849 1849 rec[hkey] = extract_dates(rec[hkey])
1850 1850 for dtkey in ('submitted', 'started', 'completed', 'received'):
1851 1851 if dtkey in rec:
1852 1852 rec[dtkey] = parse_date(rec[dtkey])
1853 1853 # relink buffers
1854 1854 if has_bufs:
1855 1855 blen = buffer_lens[i]
1856 1856 rec['buffers'], buffers = buffers[:blen],buffers[blen:]
1857 1857 if has_rbufs:
1858 1858 blen = result_buffer_lens[i]
1859 1859 rec['result_buffers'], buffers = buffers[:blen],buffers[blen:]
1860 1860
1861 1861 return records
1862 1862
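A query sketch against the Hub's task database, in the MongoDB operator style the backends accept (assuming a running cluster with its default database backend)::

    from datetime import datetime, timedelta
    from IPython.parallel import Client

    rc = Client()
    yesterday = datetime.now() - timedelta(days=1)
    # timing fields for everything submitted in the last 24 hours
    recent = rc.db_query({'submitted': {'$gt': yesterday}},
                         keys=['msg_id', 'submitted', 'completed'])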
1863 1863 __all__ = [ 'Client' ]
@@ -1,179 +1,169 b''
1 1 # encoding: utf-8
2 """
3 IO capturing utilities.
4 """
2 """IO capturing utilities."""
5 3
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2013 The IPython Development Team
8 #
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
11 #-----------------------------------------------------------------------------
12 from __future__ import print_function, absolute_import
4 # Copyright (c) IPython Development Team.
5 # Distributed under the terms of the Modified BSD License.
13 6
14 #-----------------------------------------------------------------------------
15 # Imports
16 #-----------------------------------------------------------------------------
7 from __future__ import print_function, absolute_import
17 8
18 9 import sys
19 10
20 11 from IPython.utils.py3compat import PY3
21 12
22 13 if PY3:
23 14 from io import StringIO
24 15 else:
25 16 from StringIO import StringIO
26 17
27 18 #-----------------------------------------------------------------------------
28 19 # Classes and functions
29 20 #-----------------------------------------------------------------------------
30 21
31 22
32 23 class RichOutput(object):
33 def __init__(self, source="", data=None, metadata=None):
34 self.source = source
24 def __init__(self, data=None, metadata=None):
35 25 self.data = data or {}
36 26 self.metadata = metadata or {}
37 27
38 28 def display(self):
39 29 from IPython.display import publish_display_data
40 publish_display_data(self.source, self.data, self.metadata)
30 publish_display_data(data=self.data, metadata=self.metadata)
41 31
42 32 def _repr_mime_(self, mime):
43 33 if mime not in self.data:
44 34 return
45 35 data = self.data[mime]
46 36 if mime in self.metadata:
47 37 return data, self.metadata[mime]
48 38 else:
49 39 return data
50 40
51 41 def _repr_html_(self):
52 42 return self._repr_mime_("text/html")
53 43
54 44 def _repr_latex_(self):
55 45 return self._repr_mime_("text/latex")
56 46
57 47 def _repr_json_(self):
58 48 return self._repr_mime_("application/json")
59 49
60 50 def _repr_javascript_(self):
61 51 return self._repr_mime_("application/javascript")
62 52
63 53 def _repr_png_(self):
64 54 return self._repr_mime_("image/png")
65 55
66 56 def _repr_jpeg_(self):
67 57 return self._repr_mime_("image/jpeg")
68 58
69 59 def _repr_svg_(self):
70 60 return self._repr_mime_("image/svg+xml")
71 61
72 62
73 63 class CapturedIO(object):
74 64 """Simple object for containing captured stdout/err and rich display StringIO objects
75 65
76 66 Each instance `c` has three attributes:
77 67
78 68 - ``c.stdout`` : standard output as a string
79 69 - ``c.stderr`` : standard error as a string
80 70 - ``c.outputs``: a list of rich display outputs
81 71
82 72 Additionally, there's a ``c.show()`` method which will print all of the
83 73 above in the same order, and can be invoked simply via ``c()``.
84 74 """
85 75
86 76 def __init__(self, stdout, stderr, outputs=None):
87 77 self._stdout = stdout
88 78 self._stderr = stderr
89 79 if outputs is None:
90 80 outputs = []
91 81 self._outputs = outputs
92 82
93 83 def __str__(self):
94 84 return self.stdout
95 85
96 86 @property
97 87 def stdout(self):
98 88 "Captured standard output"
99 89 if not self._stdout:
100 90 return ''
101 91 return self._stdout.getvalue()
102 92
103 93 @property
104 94 def stderr(self):
105 95 "Captured standard error"
106 96 if not self._stderr:
107 97 return ''
108 98 return self._stderr.getvalue()
109 99
110 100 @property
111 101 def outputs(self):
112 102 """A list of the captured rich display outputs, if any.
113 103
114 104 If you have a CapturedIO object ``c``, these can be displayed in IPython
115 105 using::
116 106
117 107 from IPython.display import display
118 108 for o in c.outputs:
119 109 display(o)
120 110 """
121 return [ RichOutput(s, d, md) for s, d, md in self._outputs ]
111 return [ RichOutput(d, md) for d, md in self._outputs ]
122 112
123 113 def show(self):
124 114 """write my output to sys.stdout/err as appropriate"""
125 115 sys.stdout.write(self.stdout)
126 116 sys.stderr.write(self.stderr)
127 117 sys.stdout.flush()
128 118 sys.stderr.flush()
129 for source, data, metadata in self._outputs:
130 RichOutput(source, data, metadata).display()
119 for data, metadata in self._outputs:
120 RichOutput(data, metadata).display()
131 121
132 122 __call__ = show
133 123
134 124
135 125 class capture_output(object):
136 126 """context manager for capturing stdout/err"""
137 127 stdout = True
138 128 stderr = True
139 129 display = True
140 130
141 131 def __init__(self, stdout=True, stderr=True, display=True):
142 132 self.stdout = stdout
143 133 self.stderr = stderr
144 134 self.display = display
145 135 self.shell = None
146 136
147 137 def __enter__(self):
148 138 from IPython.core.getipython import get_ipython
149 139 from IPython.core.displaypub import CapturingDisplayPublisher
150 140
151 141 self.sys_stdout = sys.stdout
152 142 self.sys_stderr = sys.stderr
153 143
154 144 if self.display:
155 145 self.shell = get_ipython()
156 146 if self.shell is None:
157 147 self.save_display_pub = None
158 148 self.display = False
159 149
160 150 stdout = stderr = outputs = None
161 151 if self.stdout:
162 152 stdout = sys.stdout = StringIO()
163 153 if self.stderr:
164 154 stderr = sys.stderr = StringIO()
165 155 if self.display:
166 156 self.save_display_pub = self.shell.display_pub
167 157 self.shell.display_pub = CapturingDisplayPublisher()
168 158 outputs = self.shell.display_pub.outputs
169 159
170 160
171 161 return CapturedIO(stdout, stderr, outputs)
172 162
173 163 def __exit__(self, exc_type, exc_value, traceback):
174 164 sys.stdout = self.sys_stdout
175 165 sys.stderr = self.sys_stderr
176 166 if self.display and self.shell:
177 167 self.shell.display_pub = self.save_display_pub
178 168
179 169
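With the `source` argument gone, captured rich output is keyed purely by mimetype data and metadata. A usage sketch of the capture utilities after this change (run inside an IPython session, so a display publisher is available)::

    from IPython.utils.capture import capture_output, RichOutput

    with capture_output() as c:
        print("hello")
        RichOutput(data={"text/html": "<b>hi</b>"}).display()

    print(c.stdout)             # "hello\n"
    rich = c.outputs[0]         # rebuilt from (data, metadata) pairs, no source
    print(rich._repr_html_())   # "<b>hi</b>"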
@@ -1,161 +1,160 b''
1 1 # encoding: utf-8
2 2 """Tests for IPython.utils.capture"""
3 3
4 4 #-----------------------------------------------------------------------------
5 5 # Copyright (C) 2013 The IPython Development Team
6 6 #
7 7 # Distributed under the terms of the BSD License. The full license is in
8 8 # the file COPYING, distributed as part of this software.
9 9 #-----------------------------------------------------------------------------
10 10
11 11 #-----------------------------------------------------------------------------
12 12 # Imports
13 13 #-----------------------------------------------------------------------------
14 14
15 15 from __future__ import print_function
16 16
17 17 import sys
18 18
19 19 import nose.tools as nt
20 20
21 21 from IPython.utils import capture
22 22
23 23 #-----------------------------------------------------------------------------
24 24 # Globals
25 25 #-----------------------------------------------------------------------------
26 26
27 27 _mime_map = dict(
28 28 _repr_png_="image/png",
29 29 _repr_jpeg_="image/jpeg",
30 30 _repr_svg_="image/svg+xml",
31 31 _repr_html_="text/html",
32 32 _repr_json_="application/json",
33 33 _repr_javascript_="application/javascript",
34 34 )
35 35
36 36 basic_data = {
37 37 'image/png' : b'binarydata',
38 38 'text/html' : "<b>bold</b>",
39 39 }
40 40 basic_metadata = {
41 41 'image/png' : {
42 42 'width' : 10,
43 43 'height' : 20,
44 44 },
45 45 }
46 46
47 47 full_data = {
48 48 'image/png' : b'binarydata',
49 49 'image/jpeg' : b'binarydata',
50 50 'image/svg+xml' : "<svg>",
51 51 'text/html' : "<b>bold</b>",
52 52 'application/javascript' : "alert();",
53 53 'application/json' : "{}",
54 54 }
55 55 full_metadata = {
56 56 'image/png' : {"png" : "exists"},
57 57 'image/jpeg' : {"jpeg" : "exists"},
58 58 'image/svg+xml' : {"svg" : "exists"},
59 59 'text/html' : {"html" : "exists"},
60 60 'application/javascript' : {"js" : "exists"},
61 61 'application/json' : {"json" : "exists"},
62 62 }
63 63
64 64 hello_stdout = "hello, stdout"
65 65 hello_stderr = "hello, stderr"
66 66
67 67 #-----------------------------------------------------------------------------
68 68 # Test Functions
69 69 #-----------------------------------------------------------------------------
70 70
71 71 def test_rich_output_empty():
72 72 """RichOutput with no args"""
73 73 rich = capture.RichOutput()
74 74 for method, mime in _mime_map.items():
75 75 yield nt.assert_equal, getattr(rich, method)(), None
76 76
77 77 def test_rich_output():
78 78 """test RichOutput basics"""
79 79 data = basic_data
80 80 metadata = basic_metadata
81 rich = capture.RichOutput(source="test", data=data, metadata=metadata)
82 yield nt.assert_equal, rich.source, "test"
81 rich = capture.RichOutput(data=data, metadata=metadata)
83 82 yield nt.assert_equal, rich._repr_html_(), data['text/html']
84 83 yield nt.assert_equal, rich._repr_png_(), (data['image/png'], metadata['image/png'])
85 84 yield nt.assert_equal, rich._repr_latex_(), None
86 85 yield nt.assert_equal, rich._repr_javascript_(), None
87 86 yield nt.assert_equal, rich._repr_svg_(), None
88 87
89 88 def test_rich_output_no_metadata():
90 89 """test RichOutput with no metadata"""
91 90 data = full_data
92 rich = capture.RichOutput(source="test", data=data)
91 rich = capture.RichOutput(data=data)
93 92 for method, mime in _mime_map.items():
94 93 yield nt.assert_equal, getattr(rich, method)(), data[mime]
95 94
96 95 def test_rich_output_metadata():
97 96 """test RichOutput with metadata"""
98 97 data = full_data
99 98 metadata = full_metadata
100 rich = capture.RichOutput(source="test", data=data, metadata=metadata)
99 rich = capture.RichOutput(data=data, metadata=metadata)
101 100 for method, mime in _mime_map.items():
102 101 yield nt.assert_equal, getattr(rich, method)(), (data[mime], metadata[mime])
103 102
104 103 def test_rich_output_display():
105 104 """test RichOutput.display
106 105
107 106 This is a bit circular, because we are actually using the capture code we are testing
108 107 to test itself.
109 108 """
110 109 data = full_data
111 110 rich = capture.RichOutput(data=data)
112 111 with capture.capture_output() as cap:
113 112 rich.display()
114 113 yield nt.assert_equal, len(cap.outputs), 1
115 114 rich2 = cap.outputs[0]
116 115 yield nt.assert_equal, rich2.data, rich.data
117 116 yield nt.assert_equal, rich2.metadata, rich.metadata
118 117
119 118 def test_capture_output():
120 119 """capture_output works"""
121 120 rich = capture.RichOutput(data=full_data)
122 121 with capture.capture_output() as cap:
123 122 print(hello_stdout, end="")
124 123 print(hello_stderr, end="", file=sys.stderr)
125 124 rich.display()
126 125 yield nt.assert_equal, hello_stdout, cap.stdout
127 126 yield nt.assert_equal, hello_stderr, cap.stderr
128 127
129 128 def test_capture_output_no_stdout():
130 129 """test capture_output(stdout=False)"""
131 130 rich = capture.RichOutput(data=full_data)
132 131 with capture.capture_output(stdout=False) as cap:
133 132 print(hello_stdout, end="")
134 133 print(hello_stderr, end="", file=sys.stderr)
135 134 rich.display()
136 135 yield nt.assert_equal, "", cap.stdout
137 136 yield nt.assert_equal, hello_stderr, cap.stderr
138 137 yield nt.assert_equal, len(cap.outputs), 1
139 138
140 139 def test_capture_output_no_stderr():
141 140 """test capture_output(stderr=False)"""
142 141 rich = capture.RichOutput(data=full_data)
143 142 # add nested capture_output so stderr doesn't make it to nose output
144 143 with capture.capture_output(), capture.capture_output(stderr=False) as cap:
145 144 print(hello_stdout, end="")
146 145 print(hello_stderr, end="", file=sys.stderr)
147 146 rich.display()
148 147 yield nt.assert_equal, hello_stdout, cap.stdout
149 148 yield nt.assert_equal, "", cap.stderr
150 149 yield nt.assert_equal, len(cap.outputs), 1
151 150
152 151 def test_capture_output_no_display():
153 152 """test capture_output(display=False)"""
154 153 rich = capture.RichOutput(data=full_data)
155 154 with capture.capture_output(display=False) as cap:
156 155 print(hello_stdout, end="")
157 156 print(hello_stderr, end="", file=sys.stderr)
158 157 rich.display()
159 158 yield nt.assert_equal, hello_stdout, cap.stdout
160 159 yield nt.assert_equal, hello_stderr, cap.stderr
161 160 yield nt.assert_equal, cap.outputs, [] No newline at end of file