main branch resync
laurent.dufrechou@gmail.com
r1824:34b4cf01 merge
This diff changes many lines (1867 lines changed).
@@ -0,0 +1,1867
1 # -*- coding: utf-8 -*-
2
3 # Copyright © 2006 Steven J. Bethard <steven.bethard@gmail.com>.
4 #
5 # Redistribution and use in source and binary forms, with or without
6 # modification, are permitted under the terms of the 3-clause BSD
7 # license. No warranty expressed or implied.
8 # For details, see the accompanying file LICENSE.txt.
9
10 """Command-line parsing library
11
12 This module is an optparse-inspired command-line parsing library that:
13
14 * handles both optional and positional arguments
15 * produces highly informative usage messages
16 * supports parsers that dispatch to sub-parsers
17
18 The following is a simple usage example that sums integers from the
19 command-line and writes the result to a file:
20
21 parser = argparse.ArgumentParser(
22 description='sum the integers at the command line')
23 parser.add_argument(
24 'integers', metavar='int', nargs='+', type=int,
25 help='an integer to be summed')
26 parser.add_argument(
27 '--log', default=sys.stdout, type=argparse.FileType('w'),
28 help='the file where the sum should be written')
29 args = parser.parse_args()
30 args.log.write('%s' % sum(args.integers))
31 args.log.close()
32
33 The module contains the following public classes:
34
35 ArgumentParser -- The main entry point for command-line parsing. As the
36 example above shows, the add_argument() method is used to populate
37 the parser with actions for optional and positional arguments. Then
38 the parse_args() method is invoked to convert the args at the
39 command-line into an object with attributes.
40
41 ArgumentError -- The exception raised by ArgumentParser objects when
42 there are errors with the parser's actions. Errors raised while
43 parsing the command-line are caught by ArgumentParser and emitted
44 as command-line messages.
45
46 FileType -- A factory for defining types of files to be created. As the
47 example above shows, instances of FileType are typically passed as
48 the type= argument of add_argument() calls.
49
50 Action -- The base class for parser actions. Typically actions are
51 selected by passing strings like 'store_true' or 'append_const' to
52 the action= argument of add_argument(). However, for greater
53 customization of ArgumentParser actions, subclasses of Action may
54 be defined and passed as the action= argument.
55
56 HelpFormatter, RawDescriptionHelpFormatter -- Formatter classes which
57 may be passed as the formatter_class= argument to the
58 ArgumentParser constructor. HelpFormatter is the default, while
59 RawDescriptionHelpFormatter tells the parser not to perform any
60 line-wrapping on description text.
61
62 All other classes in this module are considered implementation details.
63 (Also note that HelpFormatter and RawDescriptionHelpFormatter are only
64 considered public as object names -- the API of the formatter objects is
65 still considered an implementation detail.)
66 """
67
68 __version__ = '0.8.0'
69
70 import os as _os
71 import re as _re
72 import sys as _sys
73 import textwrap as _textwrap
74
75 from gettext import gettext as _
76
77 SUPPRESS = '==SUPPRESS=='
78
79 OPTIONAL = '?'
80 ZERO_OR_MORE = '*'
81 ONE_OR_MORE = '+'
82 PARSER = '==PARSER=='
83
84 # =============================
85 # Utility functions and classes
86 # =============================
87
88 class _AttributeHolder(object):
89 """Abstract base class that provides __repr__.
90
91 The __repr__ method returns a string in the format:
92 ClassName(attr=name, attr=name, ...)
93 The attributes are determined by the _get_args() and _get_kwargs()
94 methods; by default, keyword attributes are taken from the instance __dict__.
95 """
96
97 def __repr__(self):
98 type_name = type(self).__name__
99 arg_strings = []
100 for arg in self._get_args():
101 arg_strings.append(repr(arg))
102 for name, value in self._get_kwargs():
103 arg_strings.append('%s=%r' % (name, value))
104 return '%s(%s)' % (type_name, ', '.join(arg_strings))
105
106 def _get_kwargs(self):
107 return sorted(self.__dict__.items())
108
109 def _get_args(self):
110 return []
111
112 def _ensure_value(namespace, name, value):
113 if getattr(namespace, name, None) is None:
114 setattr(namespace, name, value)
115 return getattr(namespace, name)
116
117
118
119 # ===============
120 # Formatting Help
121 # ===============
122
123 class HelpFormatter(object):
124
125 def __init__(self,
126 prog,
127 indent_increment=2,
128 max_help_position=24,
129 width=None):
130
131 # default setting for width
132 if width is None:
133 try:
134 width = int(_os.environ['COLUMNS'])
135 except (KeyError, ValueError):
136 width = 80
137 width -= 2
138
139 self._prog = prog
140 self._indent_increment = indent_increment
141 self._max_help_position = max_help_position
142 self._width = width
143
144 self._current_indent = 0
145 self._level = 0
146 self._action_max_length = 0
147
148 self._root_section = self._Section(self, None)
149 self._current_section = self._root_section
150
151 self._whitespace_matcher = _re.compile(r'\s+')
152 self._long_break_matcher = _re.compile(r'\n\n\n+')
153
154 # ===============================
155 # Section and indentation methods
156 # ===============================
157
158 def _indent(self):
159 self._current_indent += self._indent_increment
160 self._level += 1
161
162 def _dedent(self):
163 self._current_indent -= self._indent_increment
164 assert self._current_indent >= 0, 'Indent decreased below 0.'
165 self._level -= 1
166
167 class _Section(object):
168 def __init__(self, formatter, parent, heading=None):
169 self.formatter = formatter
170 self.parent = parent
171 self.heading = heading
172 self.items = []
173
174 def format_help(self):
175 # format the indented section
176 if self.parent is not None:
177 self.formatter._indent()
178 join = self.formatter._join_parts
179 for func, args in self.items:
180 func(*args)
181 item_help = join(func(*args) for func, args in self.items)
182 if self.parent is not None:
183 self.formatter._dedent()
184
185 # return nothing if the section was empty
186 if not item_help:
187 return ''
188
189 # add the heading if the section was non-empty
190 if self.heading is not SUPPRESS and self.heading is not None:
191 current_indent = self.formatter._current_indent
192 heading = '%*s%s:\n' % (current_indent, '', self.heading)
193 else:
194 heading = ''
195
196 # join the section-initial newline, the heading and the help
197 return join(['\n', heading, item_help, '\n'])
198
199 def _add_item(self, func, args):
200 self._current_section.items.append((func, args))
201
202 # ========================
203 # Message building methods
204 # ========================
205
206 def start_section(self, heading):
207 self._indent()
208 section = self._Section(self, self._current_section, heading)
209 self._add_item(section.format_help, [])
210 self._current_section = section
211
212 def end_section(self):
213 self._current_section = self._current_section.parent
214 self._dedent()
215
216 def add_text(self, text):
217 if text is not SUPPRESS and text is not None:
218 self._add_item(self._format_text, [text])
219
220 def add_usage(self, usage, optionals, positionals, prefix=None):
221 if usage is not SUPPRESS:
222 args = usage, optionals, positionals, prefix
223 self._add_item(self._format_usage, args)
224
225 def add_argument(self, action):
226 if action.help is not SUPPRESS:
227
228 # find all invocations
229 get_invocation = self._format_action_invocation
230 invocations = [get_invocation(action)]
231 for subaction in self._iter_indented_subactions(action):
232 invocations.append(get_invocation(subaction))
233
234 # update the maximum item length
235 invocation_length = max(len(s) for s in invocations)
236 action_length = invocation_length + self._current_indent
237 self._action_max_length = max(self._action_max_length,
238 action_length)
239
240 # add the item to the list
241 self._add_item(self._format_action, [action])
242
243 def add_arguments(self, actions):
244 for action in actions:
245 self.add_argument(action)
246
247 # =======================
248 # Help-formatting methods
249 # =======================
250
251 def format_help(self):
252 help = self._root_section.format_help() % dict(prog=self._prog)
253 if help:
254 help = self._long_break_matcher.sub('\n\n', help)
255 help = help.strip('\n') + '\n'
256 return help
257
258 def _join_parts(self, part_strings):
259 return ''.join(part
260 for part in part_strings
261 if part and part is not SUPPRESS)
262
263 def _format_usage(self, usage, optionals, positionals, prefix):
264 if prefix is None:
265 prefix = _('usage: ')
266
267 # if no optionals or positionals are available, usage is just prog
268 if usage is None and not optionals and not positionals:
269 usage = '%(prog)s'
270
271 # if optionals and positionals are available, calculate usage
272 elif usage is None:
273 usage = '%(prog)s' % dict(prog=self._prog)
274
275 # determine width of "usage: PROG" and width of text
276 prefix_width = len(prefix) + len(usage) + 1
277 prefix_indent = self._current_indent + prefix_width
278 text_width = self._width - self._current_indent
279
280 # put them on one line if they're short enough
281 format = self._format_actions_usage
282 action_usage = format(optionals + positionals)
283 if prefix_width + len(action_usage) + 1 < text_width:
284 usage = '%s %s' % (usage, action_usage)
285
286 # if they're long, wrap optionals and positionals individually
287 else:
288 optional_usage = format(optionals)
289 positional_usage = format(positionals)
290 indent = ' ' * prefix_indent
291
292 # usage is made of PROG, optionals and positionals
293 parts = [usage, ' ']
294
295 # options always get added right after PROG
296 if optional_usage:
297 parts.append(_textwrap.fill(
298 optional_usage, text_width,
299 initial_indent=indent,
300 subsequent_indent=indent).lstrip())
301
302 # if there were options, put arguments on the next line
303 # otherwise, start them right after PROG
304 if positional_usage:
305 part = _textwrap.fill(
306 positional_usage, text_width,
307 initial_indent=indent,
308 subsequent_indent=indent).lstrip()
309 if optional_usage:
310 part = '\n' + indent + part
311 parts.append(part)
312 usage = ''.join(parts)
313
314 # prefix with 'usage:'
315 return '%s%s\n\n' % (prefix, usage)
316
317 def _format_actions_usage(self, actions):
318 parts = []
319 for action in actions:
320 if action.help is SUPPRESS:
321 continue
322
323 # produce all arg strings
324 if not action.option_strings:
325 parts.append(self._format_args(action, action.dest))
326
327 # produce the first way to invoke the option in brackets
328 else:
329 option_string = action.option_strings[0]
330
331 # if the Optional doesn't take a value, format is:
332 # -s or --long
333 if action.nargs == 0:
334 part = '%s' % option_string
335
336 # if the Optional takes a value, format is:
337 # -s ARGS or --long ARGS
338 else:
339 default = action.dest.upper()
340 args_string = self._format_args(action, default)
341 part = '%s %s' % (option_string, args_string)
342
343 # make it look optional if it's not required
344 if not action.required:
345 part = '[%s]' % part
346 parts.append(part)
347
348 return ' '.join(parts)
349
350 def _format_text(self, text):
351 text_width = self._width - self._current_indent
352 indent = ' ' * self._current_indent
353 return self._fill_text(text, text_width, indent) + '\n\n'
354
355 def _format_action(self, action):
356 # determine the required width and the entry label
357 help_position = min(self._action_max_length + 2,
358 self._max_help_position)
359 help_width = self._width - help_position
360 action_width = help_position - self._current_indent - 2
361 action_header = self._format_action_invocation(action)
362
363 # no help; start on same line and add a final newline
364 if not action.help:
365 tup = self._current_indent, '', action_header
366 action_header = '%*s%s\n' % tup
367
368 # short action name; start on the same line and pad two spaces
369 elif len(action_header) <= action_width:
370 tup = self._current_indent, '', action_width, action_header
371 action_header = '%*s%-*s ' % tup
372 indent_first = 0
373
374 # long action name; start on the next line
375 else:
376 tup = self._current_indent, '', action_header
377 action_header = '%*s%s\n' % tup
378 indent_first = help_position
379
380 # collect the pieces of the action help
381 parts = [action_header]
382
383 # if there was help for the action, add lines of help text
384 if action.help:
385 help_text = self._expand_help(action)
386 help_lines = self._split_lines(help_text, help_width)
387 parts.append('%*s%s\n' % (indent_first, '', help_lines[0]))
388 for line in help_lines[1:]:
389 parts.append('%*s%s\n' % (help_position, '', line))
390
391 # or add a newline if the description doesn't end with one
392 elif not action_header.endswith('\n'):
393 parts.append('\n')
394
395 # if there are any sub-actions, add their help as well
396 for subaction in self._iter_indented_subactions(action):
397 parts.append(self._format_action(subaction))
398
399 # return a single string
400 return self._join_parts(parts)
401
402 def _format_action_invocation(self, action):
403 if not action.option_strings:
404 return self._format_metavar(action, action.dest)
405
406 else:
407 parts = []
408
409 # if the Optional doesn't take a value, format is:
410 # -s, --long
411 if action.nargs == 0:
412 parts.extend(action.option_strings)
413
414 # if the Optional takes a value, format is:
415 # -s ARGS, --long ARGS
416 else:
417 default = action.dest.upper()
418 args_string = self._format_args(action, default)
419 for option_string in action.option_strings:
420 parts.append('%s %s' % (option_string, args_string))
421
422 return ', '.join(parts)
423
424 def _format_metavar(self, action, default_metavar):
425 if action.metavar is not None:
426 name = action.metavar
427 elif action.choices is not None:
428 choice_strs = (str(choice) for choice in action.choices)
429 name = '{%s}' % ','.join(choice_strs)
430 else:
431 name = default_metavar
432 return name
433
434 def _format_args(self, action, default_metavar):
435 name = self._format_metavar(action, default_metavar)
436 if action.nargs is None:
437 result = name
438 elif action.nargs == OPTIONAL:
439 result = '[%s]' % name
440 elif action.nargs == ZERO_OR_MORE:
441 result = '[%s [%s ...]]' % (name, name)
442 elif action.nargs == ONE_OR_MORE:
443 result = '%s [%s ...]' % (name, name)
444 elif action.nargs is PARSER:
445 result = '%s ...' % name
446 else:
447 result = ' '.join([name] * action.nargs)
448 return result
449
450 def _expand_help(self, action):
451 params = dict(vars(action), prog=self._prog)
452 for name, value in params.items():
453 if value is SUPPRESS:
454 del params[name]
455 if params.get('choices') is not None:
456 choices_str = ', '.join(str(c) for c in params['choices'])
457 params['choices'] = choices_str
458 return action.help % params
459
460 def _iter_indented_subactions(self, action):
461 try:
462 get_subactions = action._get_subactions
463 except AttributeError:
464 pass
465 else:
466 self._indent()
467 for subaction in get_subactions():
468 yield subaction
469 self._dedent()
470
471 def _split_lines(self, text, width):
472 text = self._whitespace_matcher.sub(' ', text).strip()
473 return _textwrap.wrap(text, width)
474
475 def _fill_text(self, text, width, indent):
476 text = self._whitespace_matcher.sub(' ', text).strip()
477 return _textwrap.fill(text, width, initial_indent=indent,
478 subsequent_indent=indent)
479
480 class RawDescriptionHelpFormatter(HelpFormatter):
481
482 def _fill_text(self, text, width, indent):
483 return ''.join(indent + line for line in text.splitlines(True))
484
485 class RawTextHelpFormatter(RawDescriptionHelpFormatter):
486
487 def _split_lines(self, text, width):
488 return text.splitlines()
489
490 # =====================
491 # Options and Arguments
492 # =====================
493
494 class ArgumentError(Exception):
495 ArgumentError(argument, message)
496
497 Raised whenever there was an error creating or using an argument
498 (optional or positional).
499
500 The string value of this exception is the message, augmented with
501 information about the argument that caused it.
502 """
503
504 def __init__(self, argument, message):
505 if argument.option_strings:
506 self.argument_name = '/'.join(argument.option_strings)
507 elif argument.metavar not in (None, SUPPRESS):
508 self.argument_name = argument.metavar
509 elif argument.dest not in (None, SUPPRESS):
510 self.argument_name = argument.dest
511 else:
512 self.argument_name = None
513 self.message = message
514
515 def __str__(self):
516 if self.argument_name is None:
517 format = '%(message)s'
518 else:
519 format = 'argument %(argument_name)s: %(message)s'
520 return format % dict(message=self.message,
521 argument_name=self.argument_name)
522
523 # ==============
524 # Action classes
525 # ==============
526
527 class Action(_AttributeHolder):
528 """Action(*strings, **options)
529
530 Action objects hold the information necessary to convert a
531 set of command-line arguments (possibly including an initial option
532 string) into the desired Python object(s).
533
534 Keyword Arguments:
535
536 option_strings -- A list of command-line option strings which
537 should be associated with this action.
538
539 dest -- The name of the attribute to hold the created object(s)
540
541 nargs -- The number of command-line arguments that should be consumed.
542 By default, one argument will be consumed and a single value will
543 be produced. Other values include:
544 * N (an integer) consumes N arguments (and produces a list)
545 * '?' consumes zero or one arguments
546 * '*' consumes zero or more arguments (and produces a list)
547 * '+' consumes one or more arguments (and produces a list)
548 Note that the difference between the default and nargs=1 is that
549 with the default, a single value will be produced, while with
550 nargs=1, a list containing a single value will be produced.
551
552 const -- The value to be produced if the option is specified and the
553 option uses an action that takes no values.
554
555 default -- The value to be produced if the option is not specified.
556
557 type -- The type which the command-line arguments should be converted
558 to, should be one of 'string', 'int', 'float', 'complex' or a
559 callable object that accepts a single string argument. If None,
560 'string' is assumed.
561
562 choices -- A container of values that should be allowed. If not None,
563 after a command-line argument has been converted to the appropriate
564 type, an exception will be raised if it is not a member of this
565 collection.
566
567 required -- True if the action must always be specified at the command
568 line. This is only meaningful for optional command-line arguments.
569
570 help -- The help string describing the argument.
571
572 metavar -- The name to be used for the option's argument with the help
573 string. If None, the 'dest' value will be used as the name.
574 """
575
576
577 def __init__(self,
578 option_strings,
579 dest,
580 nargs=None,
581 const=None,
582 default=None,
583 type=None,
584 choices=None,
585 required=False,
586 help=None,
587 metavar=None):
588 self.option_strings = option_strings
589 self.dest = dest
590 self.nargs = nargs
591 self.const = const
592 self.default = default
593 self.type = type
594 self.choices = choices
595 self.required = required
596 self.help = help
597 self.metavar = metavar
598
599 def _get_kwargs(self):
600 names = [
601 'option_strings',
602 'dest',
603 'nargs',
604 'const',
605 'default',
606 'type',
607 'choices',
608 'help',
609 'metavar'
610 ]
611 return [(name, getattr(self, name)) for name in names]
612
613 def __call__(self, parser, namespace, values, option_string=None):
614 raise NotImplementedError(_('.__call__() not defined'))
615
616 class _StoreAction(Action):
617 def __init__(self,
618 option_strings,
619 dest,
620 nargs=None,
621 const=None,
622 default=None,
623 type=None,
624 choices=None,
625 required=False,
626 help=None,
627 metavar=None):
628 if nargs == 0:
629 raise ValueError('nargs must be > 0')
630 if const is not None and nargs != OPTIONAL:
631 raise ValueError('nargs must be %r to supply const' % OPTIONAL)
632 super(_StoreAction, self).__init__(
633 option_strings=option_strings,
634 dest=dest,
635 nargs=nargs,
636 const=const,
637 default=default,
638 type=type,
639 choices=choices,
640 required=required,
641 help=help,
642 metavar=metavar)
643
644 def __call__(self, parser, namespace, values, option_string=None):
645 setattr(namespace, self.dest, values)
646
647 class _StoreConstAction(Action):
648 def __init__(self,
649 option_strings,
650 dest,
651 const,
652 default=None,
653 required=False,
654 help=None,
655 metavar=None):
656 super(_StoreConstAction, self).__init__(
657 option_strings=option_strings,
658 dest=dest,
659 nargs=0,
660 const=const,
661 default=default,
662 required=required,
663 help=help)
664
665 def __call__(self, parser, namespace, values, option_string=None):
666 setattr(namespace, self.dest, self.const)
667
668 class _StoreTrueAction(_StoreConstAction):
669 def __init__(self,
670 option_strings,
671 dest,
672 default=False,
673 required=False,
674 help=None):
675 super(_StoreTrueAction, self).__init__(
676 option_strings=option_strings,
677 dest=dest,
678 const=True,
679 default=default,
680 required=required,
681 help=help)
682
683 class _StoreFalseAction(_StoreConstAction):
684 def __init__(self,
685 option_strings,
686 dest,
687 default=True,
688 required=False,
689 help=None):
690 super(_StoreFalseAction, self).__init__(
691 option_strings=option_strings,
692 dest=dest,
693 const=False,
694 default=default,
695 required=required,
696 help=help)
697
698 class _AppendAction(Action):
699 def __init__(self,
700 option_strings,
701 dest,
702 nargs=None,
703 const=None,
704 default=None,
705 type=None,
706 choices=None,
707 required=False,
708 help=None,
709 metavar=None):
710 if nargs == 0:
711 raise ValueError('nargs must be > 0')
712 if const is not None and nargs != OPTIONAL:
713 raise ValueError('nargs must be %r to supply const' % OPTIONAL)
714 super(_AppendAction, self).__init__(
715 option_strings=option_strings,
716 dest=dest,
717 nargs=nargs,
718 const=const,
719 default=default,
720 type=type,
721 choices=choices,
722 required=required,
723 help=help,
724 metavar=metavar)
725
726 def __call__(self, parser, namespace, values, option_string=None):
727 _ensure_value(namespace, self.dest, []).append(values)
728
729 class _AppendConstAction(Action):
730 def __init__(self,
731 option_strings,
732 dest,
733 const,
734 default=None,
735 required=False,
736 help=None,
737 metavar=None):
738 super(_AppendConstAction, self).__init__(
739 option_strings=option_strings,
740 dest=dest,
741 nargs=0,
742 const=const,
743 default=default,
744 required=required,
745 help=help,
746 metavar=metavar)
747
748 def __call__(self, parser, namespace, values, option_string=None):
749 _ensure_value(namespace, self.dest, []).append(self.const)
750
751 class _CountAction(Action):
752 def __init__(self,
753 option_strings,
754 dest,
755 default=None,
756 required=False,
757 help=None):
758 super(_CountAction, self).__init__(
759 option_strings=option_strings,
760 dest=dest,
761 nargs=0,
762 default=default,
763 required=required,
764 help=help)
765
766 def __call__(self, parser, namespace, values, option_string=None):
767 new_count = _ensure_value(namespace, self.dest, 0) + 1
768 setattr(namespace, self.dest, new_count)
769
770 class _HelpAction(Action):
771 def __init__(self,
772 option_strings,
773 dest=SUPPRESS,
774 default=SUPPRESS,
775 help=None):
776 super(_HelpAction, self).__init__(
777 option_strings=option_strings,
778 dest=dest,
779 default=default,
780 nargs=0,
781 help=help)
782
783 def __call__(self, parser, namespace, values, option_string=None):
784 parser.print_help()
785 parser.exit()
786
787 class _VersionAction(Action):
788 def __init__(self,
789 option_strings,
790 dest=SUPPRESS,
791 default=SUPPRESS,
792 help=None):
793 super(_VersionAction, self).__init__(
794 option_strings=option_strings,
795 dest=dest,
796 default=default,
797 nargs=0,
798 help=help)
799
800 def __call__(self, parser, namespace, values, option_string=None):
801 parser.print_version()
802 parser.exit()
803
804 class _SubParsersAction(Action):
805
806 class _ChoicesPseudoAction(Action):
807 def __init__(self, name, help):
808 sup = super(_SubParsersAction._ChoicesPseudoAction, self)
809 sup.__init__(option_strings=[], dest=name, help=help)
810
811
812 def __init__(self,
813 option_strings,
814 prog,
815 parser_class,
816 dest=SUPPRESS,
817 help=None,
818 metavar=None):
819
820 self._prog_prefix = prog
821 self._parser_class = parser_class
822 self._name_parser_map = {}
823 self._choices_actions = []
824
825 super(_SubParsersAction, self).__init__(
826 option_strings=option_strings,
827 dest=dest,
828 nargs=PARSER,
829 choices=self._name_parser_map,
830 help=help,
831 metavar=metavar)
832
833 def add_parser(self, name, **kwargs):
834 # set prog from the existing prefix
835 if kwargs.get('prog') is None:
836 kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
837
838 # create a pseudo-action to hold the choice help
839 if 'help' in kwargs:
840 help = kwargs.pop('help')
841 choice_action = self._ChoicesPseudoAction(name, help)
842 self._choices_actions.append(choice_action)
843
844 # create the parser and add it to the map
845 parser = self._parser_class(**kwargs)
846 self._name_parser_map[name] = parser
847 return parser
848
849 def _get_subactions(self):
850 return self._choices_actions
851
852 def __call__(self, parser, namespace, values, option_string=None):
853 parser_name = values[0]
854 arg_strings = values[1:]
855
856 # set the parser name if requested
857 if self.dest is not SUPPRESS:
858 setattr(namespace, self.dest, parser_name)
859
860 # select the parser
861 try:
862 parser = self._name_parser_map[parser_name]
863 except KeyError:
864 tup = parser_name, ', '.join(self._name_parser_map)
865 msg = _('unknown parser %r (choices: %s)' % tup)
866 raise ArgumentError(self, msg)
867
868 # parse all the remaining options into the namespace
869 parser.parse_args(arg_strings, namespace)
870
871
872 # ==============
873 # Type classes
874 # ==============
875
876 class FileType(object):
877 """Factory for creating file object types
878
879 Instances of FileType are typically passed as type= arguments to the
880 ArgumentParser add_argument() method.
881
882 Keyword Arguments:
883 mode -- A string indicating how the file is to be opened. Accepts the
884 same values as the builtin open() function.
885 bufsize -- The file's desired buffer size. Accepts the same values as
886 the builtin open() function.
887 """
888 def __init__(self, mode='r', bufsize=None):
889 self._mode = mode
890 self._bufsize = bufsize
891
892 def __call__(self, string):
893 # the special argument "-" means sys.std{in,out}
894 if string == '-':
895 if self._mode == 'r':
896 return _sys.stdin
897 elif self._mode == 'w':
898 return _sys.stdout
899 else:
900 msg = _('argument "-" with mode %r' % self._mode)
901 raise ValueError(msg)
902
903 # all other arguments are used as file names
904 if self._bufsize:
905 return open(string, self._mode, self._bufsize)
906 else:
907 return open(string, self._mode)
908
909
910 # ===========================
911 # Optional and Positional Parsing
912 # ===========================
913
914 class Namespace(_AttributeHolder):
915
916 def __init__(self, **kwargs):
917 for name, value in kwargs.iteritems():
918 setattr(self, name, value)
919
920 def __eq__(self, other):
921 return vars(self) == vars(other)
922
923 def __ne__(self, other):
924 return not (self == other)
925
926
927 class _ActionsContainer(object):
928 def __init__(self,
929 description,
930 prefix_chars,
931 argument_default,
932 conflict_handler):
933 super(_ActionsContainer, self).__init__()
934
935 self.description = description
936 self.argument_default = argument_default
937 self.prefix_chars = prefix_chars
938 self.conflict_handler = conflict_handler
939
940 # set up registries
941 self._registries = {}
942
943 # register actions
944 self.register('action', None, _StoreAction)
945 self.register('action', 'store', _StoreAction)
946 self.register('action', 'store_const', _StoreConstAction)
947 self.register('action', 'store_true', _StoreTrueAction)
948 self.register('action', 'store_false', _StoreFalseAction)
949 self.register('action', 'append', _AppendAction)
950 self.register('action', 'append_const', _AppendConstAction)
951 self.register('action', 'count', _CountAction)
952 self.register('action', 'help', _HelpAction)
953 self.register('action', 'version', _VersionAction)
954 self.register('action', 'parsers', _SubParsersAction)
955
956 # raise an exception if the conflict handler is invalid
957 self._get_handler()
958
959 # action storage
960 self._optional_actions_list = []
961 self._positional_actions_list = []
962 self._positional_actions_full_list = []
963 self._option_strings = {}
964
965 # defaults storage
966 self._defaults = {}
967
968 # ====================
969 # Registration methods
970 # ====================
971
972 def register(self, registry_name, value, object):
973 registry = self._registries.setdefault(registry_name, {})
974 registry[value] = object
975
976 def _registry_get(self, registry_name, value, default=None):
977 return self._registries[registry_name].get(value, default)
978
979 # ==================================
980 # Namespace default settings methods
981 # ==================================
982
983 def set_defaults(self, **kwargs):
984 self._defaults.update(kwargs)
985
986 # if these defaults match any existing arguments, replace
987 # the previous default on the object with the new one
988 for action_list in [self._option_strings.values(),
989 self._positional_actions_full_list]:
990 for action in action_list:
991 if action.dest in kwargs:
992 action.default = kwargs[action.dest]
993
994 # =======================
995 # Adding argument actions
996 # =======================
997
998 def add_argument(self, *args, **kwargs):
999 """
1000 add_argument(dest, ..., name=value, ...)
1001 add_argument(option_string, option_string, ..., name=value, ...)
1002 """
1003
1004 # if no positional args are supplied or only one is supplied and
1005 # it doesn't look like an option string, parse a positional
1006 # argument
1007 chars = self.prefix_chars
1008 if not args or len(args) == 1 and args[0][0] not in chars:
1009 kwargs = self._get_positional_kwargs(*args, **kwargs)
1010
1011 # otherwise, we're adding an optional argument
1012 else:
1013 kwargs = self._get_optional_kwargs(*args, **kwargs)
1014
1015 # if no default was supplied, use the parser-level default
1016 if 'default' not in kwargs:
1017 dest = kwargs['dest']
1018 if dest in self._defaults:
1019 kwargs['default'] = self._defaults[dest]
1020 elif self.argument_default is not None:
1021 kwargs['default'] = self.argument_default
1022
1023 # create the action object, and add it to the parser
1024 action_class = self._pop_action_class(kwargs)
1025 action = action_class(**kwargs)
1026 return self._add_action(action)
1027
1028 def _add_action(self, action):
1029 # resolve any conflicts
1030 self._check_conflict(action)
1031
1032 # add to optional or positional list
1033 if action.option_strings:
1034 self._optional_actions_list.append(action)
1035 else:
1036 self._positional_actions_list.append(action)
1037 self._positional_actions_full_list.append(action)
1038 action.container = self
1039
1040 # index the action by any option strings it has
1041 for option_string in action.option_strings:
1042 self._option_strings[option_string] = action
1043
1044 # return the created action
1045 return action
1046
1047 def _add_container_actions(self, container):
1048 for action in container._optional_actions_list:
1049 self._add_action(action)
1050 for action in container._positional_actions_list:
1051 self._add_action(action)
1052
1053 def _get_positional_kwargs(self, dest, **kwargs):
1054 # make sure required is not specified
1055 if 'required' in kwargs:
1056 msg = _("'required' is an invalid argument for positionals")
1057 raise TypeError(msg)
1058
1059 # return the keyword arguments with no option strings
1060 return dict(kwargs, dest=dest, option_strings=[])
1061
1062 def _get_optional_kwargs(self, *args, **kwargs):
1063 # determine short and long option strings
1064 option_strings = []
1065 long_option_strings = []
1066 for option_string in args:
1067 # error on one-or-fewer-character option strings
1068 if len(option_string) < 2:
1069 msg = _('invalid option string %r: '
1070 'must be at least two characters long')
1071 raise ValueError(msg % option_string)
1072
1073 # error on strings that don't start with an appropriate prefix
1074 if not option_string[0] in self.prefix_chars:
1075 msg = _('invalid option string %r: '
1076 'must start with a character %r')
1077 tup = option_string, self.prefix_chars
1078 raise ValueError(msg % tup)
1079
1080 # error on strings that are all prefix characters
1081 if not (set(option_string) - set(self.prefix_chars)):
1082 msg = _('invalid option string %r: '
1083 'must contain characters other than %r')
1084 tup = option_string, self.prefix_chars
1085 raise ValueError(msg % tup)
1086
1087 # strings starting with two prefix characters are long options
1088 option_strings.append(option_string)
1089 if option_string[0] in self.prefix_chars:
1090 if option_string[1] in self.prefix_chars:
1091 long_option_strings.append(option_string)
1092
1093 # infer destination, '--foo-bar' -> 'foo_bar' and '-x' -> 'x'
1094 dest = kwargs.pop('dest', None)
1095 if dest is None:
1096 if long_option_strings:
1097 dest_option_string = long_option_strings[0]
1098 else:
1099 dest_option_string = option_strings[0]
1100 dest = dest_option_string.lstrip(self.prefix_chars)
1101 dest = dest.replace('-', '_')
1102
1103 # return the updated keyword arguments
1104 return dict(kwargs, dest=dest, option_strings=option_strings)
1105
1106 def _pop_action_class(self, kwargs, default=None):
1107 action = kwargs.pop('action', default)
1108 return self._registry_get('action', action, action)
1109
1110 def _get_handler(self):
1111 # determine function from conflict handler string
1112 handler_func_name = '_handle_conflict_%s' % self.conflict_handler
1113 try:
1114 return getattr(self, handler_func_name)
1115 except AttributeError:
1116 msg = _('invalid conflict_handler value: %r')
1117 raise ValueError(msg % self.conflict_handler)
1118
1119 def _check_conflict(self, action):
1120
1121 # find all options that conflict with this option
1122 confl_optionals = []
1123 for option_string in action.option_strings:
1124 if option_string in self._option_strings:
1125 confl_optional = self._option_strings[option_string]
1126 confl_optionals.append((option_string, confl_optional))
1127
1128 # resolve any conflicts
1129 if confl_optionals:
1130 conflict_handler = self._get_handler()
1131 conflict_handler(action, confl_optionals)
1132
1133 def _handle_conflict_error(self, action, conflicting_actions):
1134 message = _('conflicting option string(s): %s')
1135 conflict_string = ', '.join(option_string
1136 for option_string, action
1137 in conflicting_actions)
1138 raise ArgumentError(action, message % conflict_string)
1139
1140 def _handle_conflict_resolve(self, action, conflicting_actions):
1141
1142 # remove all conflicting options
1143 for option_string, action in conflicting_actions:
1144
1145 # remove the conflicting option
1146 action.option_strings.remove(option_string)
1147 self._option_strings.pop(option_string, None)
1148
1149 # if the option now has no option string, remove it from the
1150 # container holding it
1151 if not action.option_strings:
1152 action.container._optional_actions_list.remove(action)
1153
1154
1155 class _ArgumentGroup(_ActionsContainer):
1156
1157 def __init__(self, container, title=None, description=None, **kwargs):
1158 # add any missing keyword arguments by checking the container
1159 update = kwargs.setdefault
1160 update('conflict_handler', container.conflict_handler)
1161 update('prefix_chars', container.prefix_chars)
1162 update('argument_default', container.argument_default)
1163 super_init = super(_ArgumentGroup, self).__init__
1164 super_init(description=description, **kwargs)
1165
1166 self.title = title
1167 self._registries = container._registries
1168 self._positional_actions_full_list = container._positional_actions_full_list
1169 self._option_strings = container._option_strings
1170 self._defaults = container._defaults
1171
1172
1173 class ArgumentParser(_AttributeHolder, _ActionsContainer):
1174
1175 def __init__(self,
1176 prog=None,
1177 usage=None,
1178 description=None,
1179 epilog=None,
1180 version=None,
1181 parents=[],
1182 formatter_class=HelpFormatter,
1183 prefix_chars='-',
1184 argument_default=None,
1185 conflict_handler='error',
1186 add_help=True):
1187
1188 superinit = super(ArgumentParser, self).__init__
1189 superinit(description=description,
1190 prefix_chars=prefix_chars,
1191 argument_default=argument_default,
1192 conflict_handler=conflict_handler)
1193
1194 # default setting for prog
1195 if prog is None:
1196 prog = _os.path.basename(_sys.argv[0])
1197
1198 self.prog = prog
1199 self.usage = usage
1200 self.epilog = epilog
1201 self.version = version
1202 self.formatter_class = formatter_class
1203 self.add_help = add_help
1204
1205 self._argument_group_class = _ArgumentGroup
1206 self._has_subparsers = False
1207 self._argument_groups = []
1208
1209 # register types
1210 def identity(string):
1211 return string
1212 self.register('type', None, identity)
1213
1214 # add help and version arguments if necessary
1215 # (using explicit default to override global argument_default)
1216 if self.add_help:
1217 self.add_argument(
1218 '-h', '--help', action='help', default=SUPPRESS,
1219 help=_('show this help message and exit'))
1220 if self.version:
1221 self.add_argument(
1222 '-v', '--version', action='version', default=SUPPRESS,
1223 help=_("show program's version number and exit"))
1224
1225 # add parent arguments and defaults
1226 for parent in parents:
1227 self._add_container_actions(parent)
1228 try:
1229 defaults = parent._defaults
1230 except AttributeError:
1231 pass
1232 else:
1233 self._defaults.update(defaults)
1234
1235 # determines whether an "option" looks like a negative number
1236 self._negative_number_matcher = _re.compile(r'^-\d+$|^-\d*\.\d+$')
1237
1238
1239 # =======================
1240 # Pretty __repr__ methods
1241 # =======================
1242
1243 def _get_kwargs(self):
1244 names = [
1245 'prog',
1246 'usage',
1247 'description',
1248 'version',
1249 'formatter_class',
1250 'conflict_handler',
1251 'add_help',
1252 ]
1253 return [(name, getattr(self, name)) for name in names]
1254
1255 # ==================================
1256 # Optional/Positional adding methods
1257 # ==================================
1258
1259 def add_argument_group(self, *args, **kwargs):
1260 group = self._argument_group_class(self, *args, **kwargs)
1261 self._argument_groups.append(group)
1262 return group
1263
1264 def add_subparsers(self, **kwargs):
1265 if self._has_subparsers:
1266 self.error(_('cannot have multiple subparser arguments'))
1267
1268 # add the parser class to the arguments if it's not present
1269 kwargs.setdefault('parser_class', type(self))
1270
1271 # prog defaults to the usage message of this parser, skipping
1272 # optional arguments and with no "usage:" prefix
1273 if kwargs.get('prog') is None:
1274 formatter = self._get_formatter()
1275 formatter.add_usage(self.usage, [],
1276 self._get_positional_actions(), '')
1277 kwargs['prog'] = formatter.format_help().strip()
1278
1279 # create the parsers action and add it to the positionals list
1280 parsers_class = self._pop_action_class(kwargs, 'parsers')
1281 action = parsers_class(option_strings=[], **kwargs)
1282 self._positional_actions_list.append(action)
1283 self._positional_actions_full_list.append(action)
1284 self._has_subparsers = True
1285
1286 # return the created parsers action
1287 return action
1288
1289 def _add_container_actions(self, container):
1290 super(ArgumentParser, self)._add_container_actions(container)
1291 try:
1292 groups = container._argument_groups
1293 except AttributeError:
1294 pass
1295 else:
1296 for group in groups:
1297 new_group = self.add_argument_group(
1298 title=group.title,
1299 description=group.description,
1300 conflict_handler=group.conflict_handler)
1301 new_group._add_container_actions(group)
1302
1303 def _get_optional_actions(self):
1304 actions = []
1305 actions.extend(self._optional_actions_list)
1306 for argument_group in self._argument_groups:
1307 actions.extend(argument_group._optional_actions_list)
1308 return actions
1309
1310 def _get_positional_actions(self):
1311 return list(self._positional_actions_full_list)
1312
1313
1314 # =====================================
1315 # Command line argument parsing methods
1316 # =====================================
1317
1318 def parse_args(self, args=None, namespace=None):
1319 # args default to the system args
1320 if args is None:
1321 args = _sys.argv[1:]
1322
1323 # default Namespace built from parser defaults
1324 if namespace is None:
1325 namespace = Namespace()
1326
1327 # add any action defaults that aren't present
1328 optional_actions = self._get_optional_actions()
1329 positional_actions = self._get_positional_actions()
1330 for action in optional_actions + positional_actions:
1331 if action.dest is not SUPPRESS:
1332 if not hasattr(namespace, action.dest):
1333 if action.default is not SUPPRESS:
1334 default = action.default
1335 if isinstance(action.default, basestring):
1336 default = self._get_value(action, default)
1337 setattr(namespace, action.dest, default)
1338
1339 # add any parser defaults that aren't present
1340 for dest, value in self._defaults.iteritems():
1341 if not hasattr(namespace, dest):
1342 setattr(namespace, dest, value)
1343
1344 # parse the arguments and exit if there are any errors
1345 try:
1346 result = self._parse_args(args, namespace)
1347 except ArgumentError, err:
1348 self.error(str(err))
1349
1350 # make sure all required optionals are present
1351 for action in self._get_optional_actions():
1352 if action.required:
1353 if getattr(result, action.dest, None) is None:
1354 opt_strs = '/'.join(action.option_strings)
1355 msg = _('option %s is required' % opt_strs)
1356 self.error(msg)
1357
1358 # return the parsed arguments
1359 return result
1360
1361 def _parse_args(self, arg_strings, namespace):
1362
1363 # find all option indices, and determine the arg_string_pattern
1364 # which has an 'O' if there is an option at an index,
1365 # an 'A' if there is an argument, or a '-' if there is a '--'
1366 option_string_indices = {}
1367 arg_string_pattern_parts = []
1368 arg_strings_iter = iter(arg_strings)
1369 for i, arg_string in enumerate(arg_strings_iter):
1370
1371 # all args after -- are non-options
1372 if arg_string == '--':
1373 arg_string_pattern_parts.append('-')
1374 for arg_string in arg_strings_iter:
1375 arg_string_pattern_parts.append('A')
1376
1377 # otherwise, add the arg to the arg strings
1378 # and note the index if it was an option
1379 else:
1380 option_tuple = self._parse_optional(arg_string)
1381 if option_tuple is None:
1382 pattern = 'A'
1383 else:
1384 option_string_indices[i] = option_tuple
1385 pattern = 'O'
1386 arg_string_pattern_parts.append(pattern)
1387
1388 # join the pieces together to form the pattern
1389 arg_strings_pattern = ''.join(arg_string_pattern_parts)
1390
1391 # converts arg strings to the appropriate values and then takes the action
1392 def take_action(action, argument_strings, option_string=None):
1393 argument_values = self._get_values(action, argument_strings)
1394 # take the action if we didn't receive a SUPPRESS value
1395 # (e.g. from a default)
1396 if argument_values is not SUPPRESS:
1397 action(self, namespace, argument_values, option_string)
1398
1399 # function to convert arg_strings into an optional action
1400 def consume_optional(start_index):
1401
1402 # determine the optional action and parse any explicit
1403 # argument out of the option string
1404 option_tuple = option_string_indices[start_index]
1405 action, option_string, explicit_arg = option_tuple
1406
1407 # loop because single-dash options can be chained
1408 # (e.g. -xyz is the same as -x -y -z if no args are required)
1409 match_argument = self._match_argument
1410 action_tuples = []
1411 while True:
1412
1413 # if we found no optional action, raise an error
1414 if action is None:
1415 self.error(_('no such option: %s') % option_string)
1416
1417 # if there is an explicit argument, try to match the
1418 # optional's string arguments to only this
1419 if explicit_arg is not None:
1420 arg_count = match_argument(action, 'A')
1421
1422 # if the action is a single-dash option and takes no
1423 # arguments, try to parse more single-dash options out
1424 # of the tail of the option string
1425 chars = self.prefix_chars
1426 if arg_count == 0 and option_string[1] not in chars:
1427 action_tuples.append((action, [], option_string))
1428 parse_optional = self._parse_optional
1429 for char in self.prefix_chars:
1430 option_string = char + explicit_arg
1431 option_tuple = parse_optional(option_string)
1432 if option_tuple[0] is not None:
1433 break
1434 else:
1435 msg = _('ignored explicit argument %r')
1436 raise ArgumentError(action, msg % explicit_arg)
1437
1438 # set the action, etc. for the next loop iteration
1439 action, option_string, explicit_arg = option_tuple
1440
1441 # if the action expects exactly one argument, we've
1442 # successfully matched the option; exit the loop
1443 elif arg_count == 1:
1444 stop = start_index + 1
1445 args = [explicit_arg]
1446 action_tuples.append((action, args, option_string))
1447 break
1448
1449 # error if a double-dash option did not use the
1450 # explicit argument
1451 else:
1452 msg = _('ignored explicit argument %r')
1453 raise ArgumentError(action, msg % explicit_arg)
1454
1455 # if there is no explicit argument, try to match the
1456 # optional's string arguments with the following strings
1457 # if successful, exit the loop
1458 else:
1459 start = start_index + 1
1460 selected_patterns = arg_strings_pattern[start:]
1461 arg_count = match_argument(action, selected_patterns)
1462 stop = start + arg_count
1463 args = arg_strings[start:stop]
1464 action_tuples.append((action, args, option_string))
1465 break
1466
1467 # add the Optional to the list and return the index at which
1468 # the Optional's string args stopped
1469 assert action_tuples
1470 for action, args, option_string in action_tuples:
1471 take_action(action, args, option_string)
1472 return stop
1473
1474 # the list of Positionals left to be parsed; this is modified
1475 # by consume_positionals()
1476 positionals = self._get_positional_actions()
1477
1478 # function to convert arg_strings into positional actions
1479 def consume_positionals(start_index):
1480 # match as many Positionals as possible
1481 match_partial = self._match_arguments_partial
1482 selected_pattern = arg_strings_pattern[start_index:]
1483 arg_counts = match_partial(positionals, selected_pattern)
1484
1485 # slice off the appropriate arg strings for each Positional
1486 # and add the Positional and its args to the list
1487 for action, arg_count in zip(positionals, arg_counts):
1488 args = arg_strings[start_index: start_index + arg_count]
1489 start_index += arg_count
1490 take_action(action, args)
1491
1492 # slice off the Positionals that we just parsed and return the
1493 # index at which the Positionals' string args stopped
1494 positionals[:] = positionals[len(arg_counts):]
1495 return start_index
1496
1497 # consume Positionals and Optionals alternately, until we have
1498 # passed the last option string
1499 start_index = 0
1500 if option_string_indices:
1501 max_option_string_index = max(option_string_indices)
1502 else:
1503 max_option_string_index = -1
1504 while start_index <= max_option_string_index:
1505
1506 # consume any Positionals preceding the next option
1507 next_option_string_index = min(
1508 index
1509 for index in option_string_indices
1510 if index >= start_index)
1511 if start_index != next_option_string_index:
1512 positionals_end_index = consume_positionals(start_index)
1513
1514 # only try to parse the next optional if we didn't consume
1515 # the option string during the positionals parsing
1516 if positionals_end_index > start_index:
1517 start_index = positionals_end_index
1518 continue
1519 else:
1520 start_index = positionals_end_index
1521
1522 # if we consumed all the positionals we could and we're not
1523 # at the index of an option string, there were unparseable
1524 # arguments
1525 if start_index not in option_string_indices:
1526 msg = _('extra arguments found: %s')
1527 extras = arg_strings[start_index:next_option_string_index]
1528 self.error(msg % ' '.join(extras))
1529
1530 # consume the next optional and any arguments for it
1531 start_index = consume_optional(start_index)
1532
1533 # consume any positionals following the last Optional
1534 stop_index = consume_positionals(start_index)
1535
1536 # if we didn't consume all the argument strings, there were too
1537 # many supplied
1538 if stop_index != len(arg_strings):
1539 extras = arg_strings[stop_index:]
1540 self.error(_('extra arguments found: %s') % ' '.join(extras))
1541
1542 # if we didn't use all the Positional objects, there were too few
1543 # arg strings supplied.
1544 if positionals:
1545 self.error(_('too few arguments'))
1546
1547 # return the updated namespace
1548 return namespace
1549
1550 def _match_argument(self, action, arg_strings_pattern):
1551 # match the pattern for this action to the arg strings
1552 nargs_pattern = self._get_nargs_pattern(action)
1553 match = _re.match(nargs_pattern, arg_strings_pattern)
1554
1555 # raise an exception if we weren't able to find a match
1556 if match is None:
1557 nargs_errors = {
1558 None:_('expected one argument'),
1559 OPTIONAL:_('expected at most one argument'),
1560 ONE_OR_MORE:_('expected at least one argument')
1561 }
1562 default = _('expected %s argument(s)') % action.nargs
1563 msg = nargs_errors.get(action.nargs, default)
1564 raise ArgumentError(action, msg)
1565
1566 # return the number of arguments matched
1567 return len(match.group(1))
1568
1569 def _match_arguments_partial(self, actions, arg_strings_pattern):
1570 # progressively shorten the actions list by slicing off the
1571 # final actions until we find a match
1572 result = []
1573 for i in xrange(len(actions), 0, -1):
1574 actions_slice = actions[:i]
1575 pattern = ''.join(self._get_nargs_pattern(action)
1576 for action in actions_slice)
1577 match = _re.match(pattern, arg_strings_pattern)
1578 if match is not None:
1579 result.extend(len(string) for string in match.groups())
1580 break
1581
1582 # return the list of arg string counts
1583 return result
1584
1585 def _parse_optional(self, arg_string):
1586 # if it doesn't start with a prefix, it was meant to be positional
1587 if not arg_string[0] in self.prefix_chars:
1588 return None
1589
1590 # if it's just dashes, it was meant to be positional
1591 if not arg_string.strip('-'):
1592 return None
1593
1594 # if the option string is present in the parser, return the action
1595 if arg_string in self._option_strings:
1596 action = self._option_strings[arg_string]
1597 return action, arg_string, None
1598
1599 # search through all possible prefixes of the option string
1600 # and all actions in the parser for possible interpretations
1601 option_tuples = []
1602 prefix_tuples = self._get_option_prefix_tuples(arg_string)
1603 for option_string in self._option_strings:
1604 for option_prefix, explicit_arg in prefix_tuples:
1605 if option_string.startswith(option_prefix):
1606 action = self._option_strings[option_string]
1607 tup = action, option_string, explicit_arg
1608 option_tuples.append(tup)
1609 break
1610
1611 # if multiple actions match, the option string was ambiguous
1612 if len(option_tuples) > 1:
1613 options = ', '.join(opt_str for _, opt_str, _ in option_tuples)
1614 tup = arg_string, options
1615 self.error(_('ambiguous option: %s could match %s') % tup)
1616
1617 # if exactly one action matched, this segmentation is good,
1618 # so return the parsed action
1619 elif len(option_tuples) == 1:
1620 option_tuple, = option_tuples
1621 return option_tuple
1622
1623 # if it was not found as an option, but it looks like a negative
1624 # number, it was meant to be positional
1625 if self._negative_number_matcher.match(arg_string):
1626 return None
1627
1628 # it was meant to be an optional but there is no such option
1629 # in this parser (though it might be a valid option in a subparser)
1630 return None, arg_string, None
1631
1632 def _get_option_prefix_tuples(self, option_string):
1633 result = []
1634
1635 # option strings starting with two prefix characters are only
1636 # split at the '='
1637 chars = self.prefix_chars
1638 if option_string[0] in chars and option_string[1] in chars:
1639 if '=' in option_string:
1640 option_prefix, explicit_arg = option_string.split('=', 1)
1641 else:
1642 option_prefix = option_string
1643 explicit_arg = None
1644 tup = option_prefix, explicit_arg
1645 result.append(tup)
1646
1647 # option strings starting with a single prefix character are
1648 # split at all indices
1649 else:
1650 for first_index, char in enumerate(option_string):
1651 if char not in self.prefix_chars:
1652 break
1653 for i in xrange(len(option_string), first_index, -1):
1654 tup = option_string[:i], option_string[i:] or None
1655 result.append(tup)
1656
1657 # return the collected prefix tuples
1658 return result
1659
1660 def _get_nargs_pattern(self, action):
1661 # in all examples below, we have to allow for '--' args
1662 # which are represented as '-' in the pattern
1663 nargs = action.nargs
1664
1665 # the default (None) is assumed to be a single argument
1666 if nargs is None:
1667 nargs_pattern = '(-*A-*)'
1668
1669 # allow zero or one arguments
1670 elif nargs == OPTIONAL:
1671 nargs_pattern = '(-*A?-*)'
1672
1673 # allow zero or more arguments
1674 elif nargs == ZERO_OR_MORE:
1675 nargs_pattern = '(-*[A-]*)'
1676
1677 # allow one or more arguments
1678 elif nargs == ONE_OR_MORE:
1679 nargs_pattern = '(-*A[A-]*)'
1680
1681 # allow one argument followed by any number of options or arguments
1682 elif nargs is PARSER:
1683 nargs_pattern = '(-*A[-AO]*)'
1684
1685 # all others should be integers
1686 else:
1687 nargs_pattern = '(-*%s-*)' % '-*'.join('A' * nargs)
1688
1689 # if this is an optional action, -- is not allowed
1690 if action.option_strings:
1691 nargs_pattern = nargs_pattern.replace('-*', '')
1692 nargs_pattern = nargs_pattern.replace('-', '')
1693
1694 # return the pattern
1695 return nargs_pattern
1696
1697 # ========================
1698 # Value conversion methods
1699 # ========================
1700
1701 def _get_values(self, action, arg_strings):
1702 # for everything but PARSER args, strip out '--'
1703 if action.nargs is not PARSER:
1704 arg_strings = [s for s in arg_strings if s != '--']
1705
1706 # optional argument produces a default when not present
1707 if not arg_strings and action.nargs == OPTIONAL:
1708 if action.option_strings:
1709 value = action.const
1710 else:
1711 value = action.default
1712 if isinstance(value, basestring):
1713 value = self._get_value(action, value)
1714 self._check_value(action, value)
1715
1716 # when nargs='*' on a positional, if there were no command-line
1717 # args, use the default if it is anything other than None
1718 elif (not arg_strings and action.nargs == ZERO_OR_MORE and
1719 not action.option_strings):
1720 if action.default is not None:
1721 value = action.default
1722 else:
1723 value = arg_strings
1724 self._check_value(action, value)
1725
1726 # single argument or optional argument produces a single value
1727 elif len(arg_strings) == 1 and action.nargs in [None, OPTIONAL]:
1728 arg_string, = arg_strings
1729 value = self._get_value(action, arg_string)
1730 self._check_value(action, value)
1731
1732 # PARSER arguments convert all values, but check only the first
1733 elif action.nargs is PARSER:
1734 value = list(self._get_value(action, v) for v in arg_strings)
1735 self._check_value(action, value[0])
1736
1737 # all other types of nargs produce a list
1738 else:
1739 value = list(self._get_value(action, v) for v in arg_strings)
1740 for v in value:
1741 self._check_value(action, v)
1742
1743 # return the converted value
1744 return value
1745
1746 def _get_value(self, action, arg_string):
1747 type_func = self._registry_get('type', action.type, action.type)
1748 if not callable(type_func):
1749 msg = _('%r is not callable')
1750 raise ArgumentError(action, msg % type_func)
1751
1752 # convert the value to the appropriate type
1753 try:
1754 result = type_func(arg_string)
1755
1756 # TypeErrors or ValueErrors indicate errors
1757 except (TypeError, ValueError):
1758 name = getattr(action.type, '__name__', repr(action.type))
1759 msg = _('invalid %s value: %r')
1760 raise ArgumentError(action, msg % (name, arg_string))
1761
1762 # return the converted value
1763 return result
1764
1765 def _check_value(self, action, value):
1766 # converted value must be one of the choices (if specified)
1767 if action.choices is not None and value not in action.choices:
1768 tup = value, ', '.join(map(repr, action.choices))
1769 msg = _('invalid choice: %r (choose from %s)') % tup
1770 raise ArgumentError(action, msg)
1771
1772
1773
1774 # =======================
1775 # Help-formatting methods
1776 # =======================
1777
1778 def format_usage(self):
1779 formatter = self._get_formatter()
1780 formatter.add_usage(self.usage,
1781 self._get_optional_actions(),
1782 self._get_positional_actions())
1783 return formatter.format_help()
1784
1785 def format_help(self):
1786 formatter = self._get_formatter()
1787
1788 # usage
1789 formatter.add_usage(self.usage,
1790 self._get_optional_actions(),
1791 self._get_positional_actions())
1792
1793 # description
1794 formatter.add_text(self.description)
1795
1796 # positionals
1797 formatter.start_section(_('positional arguments'))
1798 formatter.add_arguments(self._positional_actions_list)
1799 formatter.end_section()
1800
1801 # optionals
1802 formatter.start_section(_('optional arguments'))
1803 formatter.add_arguments(self._optional_actions_list)
1804 formatter.end_section()
1805
1806 # user-defined groups
1807 for argument_group in self._argument_groups:
1808 formatter.start_section(argument_group.title)
1809 formatter.add_text(argument_group.description)
1810 formatter.add_arguments(argument_group._positional_actions_list)
1811 formatter.add_arguments(argument_group._optional_actions_list)
1812 formatter.end_section()
1813
1814 # epilog
1815 formatter.add_text(self.epilog)
1816
1817 # determine help from format above
1818 return formatter.format_help()
1819
1820 def format_version(self):
1821 formatter = self._get_formatter()
1822 formatter.add_text(self.version)
1823 return formatter.format_help()
1824
1825 def _get_formatter(self):
1826 return self.formatter_class(prog=self.prog)
1827
1828 # =====================
1829 # Help-printing methods
1830 # =====================
1831
1832 def print_usage(self, file=None):
1833 self._print_message(self.format_usage(), file)
1834
1835 def print_help(self, file=None):
1836 self._print_message(self.format_help(), file)
1837
1838 def print_version(self, file=None):
1839 self._print_message(self.format_version(), file)
1840
1841 def _print_message(self, message, file=None):
1842 if message:
1843 if file is None:
1844 file = _sys.stderr
1845 file.write(message)
1846
1847
1848 # ===============
1849 # Exiting methods
1850 # ===============
1851
1852 def exit(self, status=0, message=None):
1853 if message:
1854 _sys.stderr.write(message)
1855 _sys.exit(status)
1856
1857 def error(self, message):
1858 """error(message: string)
1859
1860 Prints a usage message incorporating the message to stderr and
1861 exits.
1862
1863 If you override this in a subclass, it should not return -- it
1864 should either exit or raise an exception.
1865 """
1866 self.print_usage(_sys.stderr)
1867 self.exit(2, _('%s: error: %s\n') % (self.prog, message))
@@ -0,0 +1,155
1 # encoding: utf-8
2
3 """This file contains unittests for the frontendbase module."""
4
5 __docformat__ = "restructuredtext en"
6
7 #---------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #---------------------------------------------------------------------------
13
14 #---------------------------------------------------------------------------
15 # Imports
16 #---------------------------------------------------------------------------
17
18 import unittest
19
20 try:
21 from IPython.frontend.asyncfrontendbase import AsyncFrontEndBase
22 from IPython.frontend import frontendbase
23 from IPython.kernel.engineservice import EngineService
24 except ImportError:
25 import nose
26 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
27
28 from IPython.testing.decorators import skip
29
30 class FrontEndCallbackChecker(AsyncFrontEndBase):
31 """FrontEndBase subclass for checking callbacks"""
32 def __init__(self, engine=None, history=None):
33 super(FrontEndCallbackChecker, self).__init__(engine=engine,
34 history=history)
35 self.updateCalled = False
36 self.renderResultCalled = False
37 self.renderErrorCalled = False
38
39 def update_cell_prompt(self, result, blockID=None):
40 self.updateCalled = True
41 return result
42
43 def render_result(self, result):
44 self.renderResultCalled = True
45 return result
46
47
48 def render_error(self, failure):
49 self.renderErrorCalled = True
50 return failure
51
52
53
54
55 class TestAsyncFrontendBase(unittest.TestCase):
56 def setUp(self):
57 """Setup the EngineService and FrontEndBase"""
58
59 self.fb = FrontEndCallbackChecker(engine=EngineService())
60
61 def test_implements_IFrontEnd(self):
62 assert(frontendbase.IFrontEnd.implementedBy(
63 AsyncFrontEndBase))
64
65 def test_is_complete_returns_False_for_incomplete_block(self):
66 """"""
67
68 block = """def test(a):"""
69
70 assert(self.fb.is_complete(block) == False)
71
72 def test_is_complete_returns_True_for_complete_block(self):
73 """"""
74
75 block = """def test(a): pass"""
76
77 assert(self.fb.is_complete(block))
78
79 block = """a=3"""
80
81 assert(self.fb.is_complete(block))
82
83 def test_blockID_added_to_result(self):
84 block = """3+3"""
85
86 d = self.fb.execute(block, blockID='TEST_ID')
87
88 d.addCallback(self.checkBlockID, expected='TEST_ID')
89
90 def test_blockID_added_to_failure(self):
91 block = "raise Exception()"
92
93 d = self.fb.execute(block,blockID='TEST_ID')
94 d.addErrback(self.checkFailureID, expected='TEST_ID')
95
96 def checkBlockID(self, result, expected=""):
97 assert(result['blockID'] == expected)
98
99
100 def checkFailureID(self, failure, expected=""):
101 assert(failure.blockID == expected)
102
103
104 def test_callbacks_added_to_execute(self):
105 """test that
106 update_cell_prompt
107 render_result
108
109 are added to execute request
110 """
111
112 d = self.fb.execute("10+10")
113 d.addCallback(self.checkCallbacks)
114
115 def checkCallbacks(self, result):
116 assert(self.fb.updateCalled)
117 assert(self.fb.renderResultCalled)
118
119 @skip("This test fails and lead to an unhandled error in a Deferred.")
120 def test_error_callback_added_to_execute(self):
121 """test that render_error called on execution error"""
122
123 d = self.fb.execute("raise Exception()")
124 d.addCallback(self.checkRenderError)
125
126 def checkRenderError(self, result):
127 assert(self.fb.renderErrorCalled)
128
129 def test_history_returns_expected_block(self):
130 """Make sure history browsing doesn't fail"""
131
132 blocks = ["a=1","a=2","a=3"]
133 for b in blocks:
134 d = self.fb.execute(b)
135
136 # d is now the deferred for the last executed block
137 d.addCallback(self.historyTests, blocks)
138
139
140 def historyTests(self, result, blocks):
141 """historyTests"""
142
143 assert(len(blocks) >= 3)
144 assert(self.fb.get_history_previous("") == blocks[-2])
145 assert(self.fb.get_history_previous("") == blocks[-3])
146 assert(self.fb.get_history_next() == blocks[-2])
147
148
149 def test_history_returns_none_at_startup(self):
150 """test_history_returns_none_at_startup"""
151
152 assert(self.fb.get_history_previous("")==None)
153 assert(self.fb.get_history_next()==None)
154
155
@@ -0,0 +1,26
1 # encoding: utf-8
2
3 """This file contains unittests for the interpreter.py module."""
4
5 __docformat__ = "restructuredtext en"
6
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
13
14 #-----------------------------------------------------------------------------
15 # Imports
16 #-----------------------------------------------------------------------------
17
18 from IPython.kernel.core.interpreter import Interpreter
19
20 def test_unicode():
21 """ Test unicode handling with the interpreter.
22 """
23 i = Interpreter()
24 i.execute_python(u'print "ù"')
25 i.execute_python('print "ù"')
26
@@ -0,0 +1,53
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 """IPython Test Suite Runner.
4 """
5
6 import sys
7 import warnings
8
9 from nose.core import TestProgram
10 import nose.plugins.builtin
11
12 from IPython.testing.plugin.ipdoctest import IPythonDoctest
13
14 def main():
15 """Run the IPython test suite.
16 """
17
18 warnings.filterwarnings('ignore',
19 'This will be removed soon. Use IPython.testing.util instead')
20
21
22 # construct list of plugins, omitting the existing doctest plugin
23 plugins = [IPythonDoctest()]
24 for p in nose.plugins.builtin.plugins:
25 plug = p()
26 if plug.name == 'doctest':
27 continue
28
29 #print 'adding plugin:',plug.name # dbg
30 plugins.append(plug)
31
32 argv = sys.argv + ['--doctest-tests','--doctest-extension=txt',
33 '--detailed-errors',
34
35 # We add --exe because of setuptools' imbecility (it
36 # blindly does chmod +x on ALL files). Nose does the
37 # right thing and it tries to avoid executables,
38 # setuptools unfortunately forces our hand here. This
39 # has been discussed on the distutils list and the
40 # setuptools devs refuse to fix this problem!
41 '--exe',
42 ]
43
44 has_ip = False
45 for arg in sys.argv:
46 if 'IPython' in arg:
47 has_ip = True
48 break
49
50 if not has_ip:
51 argv.append('IPython')
52
53 TestProgram(argv=argv,plugins=plugins)
1 NO CONTENT: new file 100644
@@ -0,0 +1,161
1 """Tests for the decorators we've created for IPython.
2 """
3
4 # Module imports
5 # Std lib
6 import inspect
7 import sys
8
9 # Third party
10 import nose.tools as nt
11
12 # Our own
13 from IPython.testing import decorators as dec
14
15
16 #-----------------------------------------------------------------------------
17 # Utilities
18
19 # Note: copied from OInspect, kept here so the testing stuff doesn't create
20 # circular dependencies and is easier to reuse.
21 def getargspec(obj):
22 """Get the names and default values of a function's arguments.
23
24 A tuple of four things is returned: (args, varargs, varkw, defaults).
25 'args' is a list of the argument names (it may contain nested lists).
26 'varargs' and 'varkw' are the names of the * and ** arguments or None.
27 'defaults' is an n-tuple of the default values of the last n arguments.
28
29 Modified version of inspect.getargspec from the Python Standard
30 Library."""
31
32 if inspect.isfunction(obj):
33 func_obj = obj
34 elif inspect.ismethod(obj):
35 func_obj = obj.im_func
36 else:
37 raise TypeError, 'arg is not a Python function'
38 args, varargs, varkw = inspect.getargs(func_obj.func_code)
39 return args, varargs, varkw, func_obj.func_defaults
40
41 #-----------------------------------------------------------------------------
42 # Testing functions
43
44 @dec.skip
45 def test_deliberately_broken():
46 """A deliberately broken test - we want to skip this one."""
47 1/0
48
49 @dec.skip('foo')
50 def test_deliberately_broken2():
51 """Another deliberately broken test - we want to skip this one."""
52 1/0
53
54
55 # Verify that we can correctly skip the doctest for a function at will, but
56 # that the docstring itself is NOT destroyed by the decorator.
57 @dec.skip_doctest
58 def doctest_bad(x,y=1,**k):
59 """A function whose doctest we need to skip.
60
61 >>> 1+1
62 3
63 """
64 print 'x:',x
65 print 'y:',y
66 print 'k:',k
67
68
69 def call_doctest_bad():
70 """Check that we can still call the decorated functions.
71
72 >>> doctest_bad(3,y=4)
73 x: 3
74 y: 4
75 k: {}
76 """
77 pass
78
79
80 def test_skip_dt_decorator():
81 """Doctest-skipping decorator should preserve the docstring.
82 """
83 # Careful: 'check' must be a *verbatim* copy of the doctest_bad docstring!
84 check = """A function whose doctest we need to skip.
85
86 >>> 1+1
87 3
88 """
89 # Fetch the docstring from doctest_bad after decoration.
90 val = doctest_bad.__doc__
91
92 assert check==val,"doctest_bad docstrings don't match"
93
94 # Doctest skipping should work for class methods too
95 class foo(object):
96 """Foo
97
98 Example:
99
100 >>> 1+1
101 2
102 """
103
104 @dec.skip_doctest
105 def __init__(self,x):
106 """Make a foo.
107
108 Example:
109
110 >>> f = foo(3)
111 junk
112 """
113 print 'Making a foo.'
114 self.x = x
115
116 @dec.skip_doctest
117 def bar(self,y):
118 """Example:
119
120 >>> f = foo(3)
121 >>> f.bar(0)
122 boom!
123 >>> 1/0
124 bam!
125 """
126 return 1/y
127
128 def baz(self,y):
129 """Example:
130
131 >>> f = foo(3)
132 Making a foo.
133 >>> f.baz(3)
134 True
135 """
136 return self.x==y
137
138
139
140 def test_skip_dt_decorator2():
141 """Doctest-skipping decorator should preserve function signature.
142 """
143 # Hardcoded correct answer
144 dtargs = (['x', 'y'], None, 'k', (1,))
145 # Introspect out the value
146 dtargsr = getargspec(doctest_bad)
147 assert dtargsr==dtargs, \
148 "Incorrectly reconstructed args for doctest_bad: %s" % (dtargsr,)
149
150
151 @dec.skip_linux
152 def test_linux():
153 nt.assert_not_equals(sys.platform,'linux2',"This test can't run under linux")
154
155 @dec.skip_win32
156 def test_win32():
157 nt.assert_not_equals(sys.platform,'win32',"This test can't run under windows")
158
159 @dec.skip_osx
160 def test_osx():
161 nt.assert_not_equals(sys.platform,'darwin',"This test can't run under osx")
1 NO CONTENT: new file 100644
@@ -0,0 +1,32
1 # encoding: utf-8
2
3 """Tests for genutils.py"""
4
5 __docformat__ = "restructuredtext en"
6
7 #-----------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #-----------------------------------------------------------------------------
13
14 #-----------------------------------------------------------------------------
15 # Imports
16 #-----------------------------------------------------------------------------
17
18 from IPython import genutils
19
20
21 def test_get_home_dir():
22 """Make sure we can get the home directory."""
23 home_dir = genutils.get_home_dir()
24
25 def test_get_ipython_dir():
26 """Make sure we can get the ipython directory."""
27 ipdir = genutils.get_ipython_dir()
28
29 def test_get_security_dir():
30 """Make sure we can get the ipython/security directory."""
31 sdir = genutils.get_security_dir()
32 No newline at end of file
@@ -0,0 +1,21
1 """ Tests for various magic functions
2
3 Needs to be run by nose (to make ipython session available)
4
5 """
6 def test_rehashx():
7 # clear up everything
8 _ip.IP.alias_table.clear()
9 del _ip.db['syscmdlist']
10
11 _ip.magic('rehashx')
12 # Practically ALL ipython development systems will have more than 10 aliases
13
14 assert len(_ip.IP.alias_table) > 10
15 for key, val in _ip.IP.alias_table.items():
16 # we must strip dots from alias names
17 assert '.' not in key
18
19 # rehashx must fill up syscmdlist
20 scoms = _ip.db['syscmdlist']
21 assert len(scoms) > 10
@@ -0,0 +1,22
1 # If you want ipython to appear in a linux app launcher ("start menu"), install this by doing:
2 # sudo desktop-file-install ipython-sh.desktop
3
4 [Desktop Entry]
5 Comment=Perform shell-like tasks in interactive ipython session
6 Exec=ipython -p sh
7 GenericName[en_US]=IPython shell mode
8 GenericName=IPython shell mode
9 Icon=gnome-netstatus-idle
10 MimeType=
11 Name[en_US]=ipython-sh
12 Name=ipython-sh
13 Path=
14 Categories=Development;Utility;
15 StartupNotify=false
16 Terminal=true
17 TerminalOptions=
18 Type=Application
19 X-DBUS-ServiceName=
20 X-DBUS-StartupType=none
21 X-KDE-SubstituteUID=false
22 X-KDE-Username=
@@ -0,0 +1,22
1 # If you want ipython to appear in a linux app launcher ("start menu"), install this by doing:
2 # sudo desktop-file-install ipython.desktop
3
4 [Desktop Entry]
5 Comment=Enhanced interactive Python shell
6 Exec=ipython
7 GenericName[en_US]=IPython
8 GenericName=IPython
9 Icon=gnome-netstatus-idle
10 MimeType=
11 Name[en_US]=ipython
12 Name=ipython
13 Path=
14 Categories=Development;Utility;
15 StartupNotify=false
16 Terminal=true
17 TerminalOptions=
18 Type=Application
19 X-DBUS-ServiceName=
20 X-DBUS-StartupType=none
21 X-KDE-SubstituteUID=false
22 X-KDE-Username=
@@ -0,0 +1,240
1 .. _paralleltask:
2
3 ==========================
4 The IPython task interface
5 ==========================
6
7 .. contents::
8
9 The ``Task`` interface to the controller presents the engines as a fault tolerant, dynamic, load-balanced system of workers. Unlike the ``MultiEngine`` interface, in the ``Task`` interface the user has no direct access to individual engines. In some ways, this interface is simpler, but in other ways it is more powerful. Best of all, the user can use both of these interfaces at the same time to take advantage of both of their strengths. When the user's work can be broken up into segments that do not depend on previous execution, the ``Task`` interface is ideal. But it also has more power and flexibility, allowing the user to guide the distribution of jobs, without having to assign Tasks to engines explicitly.
10
11 Starting the IPython controller and engines
12 ===========================================
13
14 To follow along with this tutorial, the user will need to start the IPython
15 controller and four IPython engines. The simplest way of doing this is to
16 use the ``ipcluster`` command::
17
18 $ ipcluster -n 4
19
20 For more detailed information about starting the controller and engines, see our :ref:`introduction <ip1par>` to using IPython for parallel computing.
21
22 The magic here is that this single controller and set of engines is running both the MultiEngine and ``Task`` interfaces simultaneously.
23
24 QuickStart Task Farming
25 =======================
26
27 First, a quick example of how to start running the most basic Tasks.
28 The first step is to import the IPython ``client`` module and then create a ``TaskClient`` instance::
29
30 In [1]: from IPython.kernel import client
31
32 In [2]: tc = client.TaskClient()
33
34 Then the user wraps the commands they want to run in Tasks::
35
36 In [3]: tasklist = []
37 In [4]: for n in range(1000):
38 ... tasklist.append(client.Task("a = %i"%n, pull="a"))
39
40 The first argument of the ``Task`` constructor is a string, the command to be executed. The most important optional keyword argument is ``pull``, which can be a string or list of strings, and it specifies the variable names to be saved as results of the ``Task``.
41
42 Next, the user needs to submit the Tasks to the ``TaskController`` with the ``TaskClient``::
43
44 In [5]: taskids = [ tc.run(t) for t in tasklist ]
45
46 This will give the user a list of the TaskIDs used by the controller to keep track of the Tasks and their results. Now at some point the user is going to want to get those results back. The ``barrier`` method allows the user to wait for the Tasks to finish running::
47
48 In [6]: tc.barrier(taskids)
49
50 This command will block until all the Tasks in ``taskids`` have finished. Now, the user probably wants to look at the results::
51
52 In [7]: task_results = [ tc.get_task_result(taskid) for taskid in taskids ]
53
54 Now the user has a list of ``TaskResult`` objects, which hold the actual result as a dictionary and also keep track of some useful metadata about the ``Task``::
55
56 In [8]: tr = task_results[73]
57
58 In [9]: tr
59 Out[9]: TaskResult[ID:73]:{'a':73}
60
61 In [10]: tr.engineid
62 Out[10]: 1
63
64 In [11]: tr.submitted, tr.completed, tr.duration
65 Out[11]: ("2008/03/08 03:41:42", "2008/03/08 03:41:44", 2.12345)
66
67 The actual results are stored in a dictionary, ``tr.results``, and a namespace object ``tr.ns`` which accesses the result keys by attribute::
68
69 In [12]: tr.results['a']
70 Out[12]: 73
71
72 In [13]: tr.ns.a
73 Out[13]: 73
74
75 That should cover the basics of running simple Tasks. There are several more powerful things the user can do with Tasks, covered later. Probably the most useful is using a ``MultiEngineClient`` interface to initialize all the engines with the import dependencies necessary to run the user's Tasks, as in the sketch below.
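
For example, a minimal sketch (assuming the default ``MultiEngineClient`` constructor connects to the same controller as the ``TaskClient`` above) that imports a module on every engine before submitting a ``Task`` that needs it::

    In [14]: mec = client.MultiEngineClient()

    In [15]: mec.execute("import numpy")

    In [16]: tid = tc.run(client.Task("a = numpy.random.rand()", pull="a"))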
76
77 There are many options for running and managing Tasks. The best way to learn more about the ``Task`` interface is to study the examples in ``docs/examples``. If the user does so and learns a lot about this interface, we encourage them to expand this documentation about the ``Task`` system.
78
79 Overview of the Task System
80 ===========================
81
82 The user's view of the ``Task`` system has three basic objects: the ``TaskClient``, the ``Task``, and the ``TaskResult``. The names of these three objects indicate their roles well.
83
84 The ``TaskClient`` is the user's ``Task`` farming connection to the IPython cluster. Unlike the ``MultiEngineClient``, the ``TaskController`` handles all the scheduling and distribution of work, so the ``TaskClient`` has no notion of engines; it just submits Tasks and requests their results. The Tasks are described as ``Task`` objects, and their results are wrapped in ``TaskResult`` objects. Thus, there are very few methods the user needs to manage.
85
86 Inside the task system is a Scheduler object, which assigns tasks to workers. The default scheduler is a simple FIFO queue. Subclassing the Scheduler should be easy; it is mostly a matter of implementing your own priority system, roughly as sketched below.
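
A purely illustrative sketch (the ``Scheduler`` base class and the method name used here are assumptions, not the actual internal API)::

    class PriorityScheduler(Scheduler):
        """Hand out the highest-priority pending task first (illustrative only)."""

        def schedule(self, pending_tasks, worker):
            # 'priority' is a hypothetical attribute attached to each task
            return max(pending_tasks, key=lambda t: getattr(t, 'priority', 0))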
87
88 The TaskClient
89 ==============
90
91 The ``TaskClient`` is the object the user uses to connect to the ``Controller`` that is managing their Tasks. It is the analog of the ``MultiEngineClient`` for the standard IPython multiplexing interface. As with all client interfaces, the first step is to import the IPython Client Module::
92
93 In [1]: from IPython.kernel import client
94
95 Just as with the ``MultiEngineClient``, the user creates the ``TaskClient`` with a tuple containing the IP address and port of the ``Controller``. The ``client`` module conveniently provides the default address of the ``Task`` interface of the controller. Creating a default ``TaskClient`` object would be done like this::
96
97 In [2]: tc = client.TaskClient(client.default_task_address)
98
99 or, if the user wants to specify a non-default location of the ``Controller``, it can be given explicitly::
100
101 In [3]: tc = client.TaskClient(("192.168.1.1", 10113))
102
103 As discussed earlier, the ``TaskClient`` only has a few basic methods.
104
105 * ``tc.run(task)``
106 ``run`` is the method by which the user submits Tasks. It takes exactly one argument, a ``Task`` object. All the advanced control of ``Task`` behavior is handled by properties of the ``Task`` object, rather than the submission command, so they will be discussed later in the `Task`_ section. ``run`` returns an integer, the TaskID by which the ``Task`` and its results can be tracked and retrieved::
107
108 In [4]: TaskID = tc.run(Task)
109
110 * ``tc.get_task_result(taskid, block=False)``
111 ``get_task_result`` is the method by which results are retrieved. It takes a single integer argument, the TaskID of the result the user wishes to retrieve. ``get_task_result`` also takes a keyword argument ``block``. ``block`` specifies whether the user actually wants to wait for the result. If ``block`` is ``False``, as it is by default, ``get_task_result`` will return immediately. If the ``Task`` has completed, it will return the ``TaskResult`` object for that ``Task``. But if the ``Task`` has not completed, it will return ``None``. If the user specifies ``block=True``, then ``get_task_result`` will wait for the ``Task`` to complete, and always return the ``TaskResult`` for the requested ``Task``.
112 * ``tc.barrier(taskid(s))``
113 ``barrier`` is a synchronization method. It takes exactly one argument, a TaskID or list of taskIDs. ``barrier`` will block until all the specified Tasks have completed. In practice, a barrier is often called between the ``Task`` submission section of the code and the result gathering section::
114
115 In [5]: taskIDs = [ tc.run(Task) for Task in myTasks ]
116
117 In [6]: tc.get_task_result(taskIDs[-1]) is None
118 Out[6]: True
119
120 In [7]: tc.barrier(taskIDs)
121
122 In [8]: results = [ tc.get_task_result(tid) for tid in taskIDs ]
123
124 * ``tc.queue_status(verbose=False)``
125 ``queue_status`` is a method for querying the state of the ``TaskController``. ``queue_status`` returns a dict of the form::
126
127 {'scheduled': Tasks that have been submitted but not yet run
128 'pending' : Tasks that are currently running
129 'succeeded': Tasks that have completed successfully
130 'failed' : Tasks that have finished with a failure
131 }
132
133 If ``verbose`` is not specified (or is ``False``), then the values of the dict are integers - the number of Tasks in each state. If ``verbose`` is ``True``, then each element in the dict is a list of the taskIDs in that state::
134
135 In [8]: tc.queue_status()
136 Out[8]: {'scheduled': 4,
137 'pending' : 2,
138 'succeeded': 5,
139 'failed' : 1
140 }
141
142 In [9]: tc.queue_status(verbose=True)
143 Out[9]: {'scheduled': [8,9,10,11],
144 'pending' : [6,7],
145 'succeeded': [0,1,2,4,5],
146 'failed' : [3]
147 }
148
149 * ``tc.abort(taskid)``
150 ``abort`` allows the user to abort Tasks that have already been submitted. ``abort`` will always return immediately. If the ``Task`` has completed, ``abort`` will raise an ``IndexError`` (``Task Already Completed``). An obvious case for ``abort`` would be where the user submits a long-running ``Task`` with a number of retries (see the `Task`_ section for how to specify retries) in an interactive session, but realizes there has been a typo. The user can then abort the ``Task``, preventing certain failures from cluttering up the queue. It can also be used for parallel search-type problems, where only one ``Task`` will give the solution, so once the user finds the solution, they would want to abort all remaining Tasks to prevent wasted work.
151 * ``tc.spin()``
152 ``spin`` simply triggers the scheduler in the ``TaskController``. Under most normal circumstances, this will do nothing. The primary known usage case involves the ``Task`` dependency (see `Dependencies`_). The dependency is a function of an Engine's ``properties``, but changing the ``properties`` via the ``MultiEngineClient`` does not trigger a reschedule event. The main example case for this requires the following event sequence:
153 * ``engine`` is available, ``Task`` is submitted, but ``engine`` does not have ``Task``'s dependencies.
154 * ``engine`` gets necessary dependencies while no new Tasks are submitted or completed.
155 * now ``engine`` can run ``Task``, but a ``Task`` event is required for the ``TaskController`` to try scheduling ``Task`` again.
156
157 ``spin`` is just an empty ping method to ensure that the Controller has scheduled all available Tasks, and should not be needed under most normal circumstances.
158
159 That covers the ``TaskClient``, a simple interface to the cluster. With this, the user can submit jobs (and abort them if necessary), request their results, and synchronize on arbitrary subsets of jobs.
160
161 .. _task:
162
163 The Task Object
164 ===============
165
166 The ``Task`` is the basic object for describing a job. It can be used in a very simple manner, where the user just specifies a command string to be executed as the ``Task``. The usage of this first argument is exactly the same as the ``execute`` method of the ``MultiEngine`` (in fact, ``execute`` is called to run the code)::
167
168 In [1]: t = client.Task("a = str(id)")
169
170 This ``Task`` would run and store the string representation of the ``id`` element in ``a`` in the worker's namespace, but it is fairly useless because the user does not know anything about the state of the ``worker`` on which it ran at the time the results are retrieved. It is important that each ``Task`` not expect the state of the ``worker`` to persist after the ``Task`` is completed.
171 There are many different situations for using ``Task`` Farming, and the ``Task`` object has many attributes for use in customizing the ``Task`` behavior. All of a ``Task``'s attributes may be specified in the constructor, through keyword arguments, or after ``Task`` construction through attribute assignment.
172
173 Data Attributes
174 ***************
175 It is likely that the user may want to move data around before or after executing the ``Task``. We provide methods of sending data to initialize the worker's namespace, and specifying what data to bring back as the ``Task``'s results.
176
177 * pull = []
178 The obvious case is as above, where ``t`` would execute and store the result of the computation in ``a``; it is likely that the user would want to bring ``a`` back to their namespace. This is done through the ``pull`` attribute. ``pull`` can be a string or list of strings, and it specifies the names of variables to be retrieved. The ``TaskResult`` object retrieved by ``get_task_result`` will have a dictionary of keys and values, and the ``Task``'s ``pull`` attribute determines what goes into it::
179
180 In [2]: t = client.Task("a = str(id)", pull = "a")
181
182 In [3]: t = client.Task("a = str(id)", pull = ["a", "id"])
183
184 * push = {}
185 A user might also want to initialize some data into the namespace before the code part of the ``Task`` is run. Enter ``push``. ``push`` is a dictionary of key/value pairs to be loaded from the user's namespace into the worker's immediately before execution::
186
187 In [4]: t = client.Task("a = f(submitted)", push=dict(submitted=time.time()), pull="a")
188
189 ``push`` and ``pull`` result directly in calls to an ``engine``'s ``push`` and ``pull`` methods immediately before and after ``Task`` execution, respectively, and thus their API is the same.
190
191 Namespace Cleaning
192 ******************
193 When a user is running a large number of Tasks, it is likely that the workers' namespaces could become cluttered. Some Tasks might be sensitive to clutter, while others might be known to cause namespace pollution. For these reasons, Tasks have two boolean attributes for cleaning up the namespace.
194
195 * ``clear_after``
196 If ``clear_after`` is set to ``True``, the worker on which the ``Task`` was run will be reset (via ``engine.reset``) upon completion of the ``Task``. This can be useful both for Tasks that produce clutter and for Tasks whose intermediate data one might wish to keep private::
197
198 In [5]: t = client.Task("a = range(1e10)", pull = "a",clear_after=True)
199
200
201 * ``clear_before``
202 As one might guess, ``clear_before`` is identical to ``clear_after``, except that the clearing takes place before the ``Task`` is run. This ensures that the ``Task`` runs on a fresh worker::
203
204 In [6]: t = client.Task("a = globals()", pull = "a",clear_before=True)
205
206 Of course, a user can use both at the same time, ensuring that all workers are clear except when they are currently running a job. Both of these default to ``False``.
207
208 Fault Tolerance
209 ***************
210 It is possible that Tasks might fail, and there are a variety of reasons this could happen. One might be that the worker a ``Task`` was running on disconnected, even though there was nothing wrong with the ``Task`` itself. With the fault tolerance attributes of the ``Task``, the user can specify how many times to resubmit the ``Task``, and what to do if it never succeeds.
211
212 * ``retries``
213 ``retries`` is an integer, specifying the number of times a ``Task`` is to be retried. It defaults to zero. It is often a good idea for this number to be 1 or 2, to protect the ``Task`` from disconnecting engines, but not a large number. If a ``Task`` fails 100 times, there is probably something wrong with the ``Task``. The canonical bad example::
214
215 In [7]: t = client.Task("os.kill(os.getpid(), 9)", retries=99)
216
217 This would actually take down 100 workers.
218
219 * ``recovery_task``
220 ``recovery_task`` is another ``Task`` object, to be run in the event that the original ``Task`` still fails after running out of retries. Since ``recovery_task`` is itself a ``Task``, it can have its own ``recovery_task``. The chain of Tasks is limitless, except loops are not allowed (that would be bad!).
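
    For example, one might pair a long-running ``Task`` with a cheaper fallback (the command strings here are placeholders, not real functions)::

        fallback = client.Task("a = cheap_approximation()", pull="a")
        t = client.Task("a = expensive_calc()", pull="a", retries=2, recovery_task=fallback)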
221
222 Dependencies
223 ************
224 Dependencies are the most powerful part of the ``Task`` farming system, because they allow the user to do some classification of the workers and guide the ``Task`` distribution without meddling with the controller directly. It makes use of two objects - the ``Task``'s ``depend`` attribute, and the engine's ``properties``. See the `MultiEngine`_ reference for how to use engine properties. The engine properties API exists for extending IPython, allowing conditional execution and new controllers that make decisions based on the properties of their engines. Currently the ``Task`` dependency is the only internal use of the properties API.
225
226 .. _MultiEngine: ./parallel_multiengine
227
228 The ``depend`` attribute of a ``Task`` must be a function of exactly one argument, the worker's properties dictionary, and it should return ``True`` if the ``Task`` should be allowed to run on the worker and ``False`` if not. The usage in the controller is fault tolerant, so exceptions raised by ``Task.depend`` will be ignored and functionally equivalent to always returning ``False``. Tasks with invalid ``depend`` functions will never be assigned to a worker::
229
230 In [8]: def dep(properties):
231 ... return properties["RAM"] > 2**32 # have at least 4GB
232 In [9]: t = client.Task("a = bigfunc()", depend=dep)
233
234 It is important to note that assignment of values to the properties dict is done entirely by the user, either locally (in the engine) using the EngineAPI, or remotely, through the ``MultiEngineClient``'s get/set_properties methods.
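
For instance, a sketch of tagging all engines from the client side so that the ``dep`` function above succeeds (the exact ``set_properties`` signature is an assumption; only the use of a dictionary is taken from this documentation)::

    In [10]: mec = client.MultiEngineClient()

    In [11]: mec.set_properties(dict(RAM=2**34))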
235
236
237
238
239
240
@@ -0,0 +1,33
1 =========================================
2 Notes on the IPython configuration system
3 =========================================
4
5 This document has some random notes on the configuration system.
6
7 To start, an IPython process needs:
8
9 * Configuration files
10 * Command line options
11 * Additional files (FURL files, extra scripts, etc.)
12
13 It feeds these things into the core logic of the process, and as output,
14 produces:
15
16 * Log files
17 * Security files
18
19 There are a number of things that complicate this:
20
21 * A process may need to be started on a different host that doesn't have
22 any of the config files or additional files. Those files need to be
23 moved over and put in a staging area. The process then needs to be told
24 about them.
25 * The location of the output files should somehow be set by config files or
26 command line options.
27 * Our config files are very hierarchical, but command line options are flat,
28 making it difficult to relate command line options to config files.
29 * Some processes (like ipcluster and the daemons) have to manage the input and
30 output files for multiple different subprocesses, each possibly on a
31 different host. Ahhhh!
32 * Our configurations are not singletons. A given user will likely have
33 many different configurations for different clusters.
@@ -0,0 +1,217
1 Overview
2 ========
3
4 This document describes the steps required to install IPython. IPython is organized into a number of subpackages, each of which has its own dependencies. All of the subpackages come with IPython, so you don't need to download and install them separately. However, to use a given subpackage, you will need to install all of its dependencies.
5
6
7 Please let us know if you have problems installing IPython or any of its
8 dependencies. IPython requires Python version 2.4 or greater. We have not tested
9 IPython with the upcoming 2.6 or 3.0 versions.
10
11 .. warning::
12
13 IPython will not work with Python 2.3 or below.
14
15 Some of the installation approaches use the :mod:`setuptools` package and its :command:`easy_install` command line program. In many scenarios, this provides the simplest method of installing IPython and its dependencies. It is not required, though. More information about :mod:`setuptools` can be found on its website.
16
17 More general information about installing Python packages can be found in Python's documentation at http://www.python.org/doc/.
18
19 Quickstart
20 ==========
21
22 If you have :mod:`setuptools` installed and you are on OS X or Linux (not Windows), the following will download and install IPython *and* the main optional dependencies::
23
24 $ easy_install ipython[kernel,security,test]
25
26 This will get Twisted, zope.interface and Foolscap, which are needed for IPython's parallel computing features as well as the nose package, which will enable you to run IPython's test suite. To run IPython's test suite, use the :command:`iptest` command::
27
28 $ iptest
29
30 Read on for more specific details and instructions for Windows.
31
32 Installing IPython itself
33 =========================
34
35 Given a properly built Python, the basic interactive IPython shell will work with no external dependencies. However, some Python distributions (particularly on Windows and OS X), don't come with a working :mod:`readline` module. The IPython shell will work without :mod:`readline`, but will lack many features that users depend on, such as tab completion and command line editing. See below for details of how to make sure you have a working :mod:`readline`.
36
37 Installation using easy_install
38 -------------------------------
39
40 If you have :mod:`setuptools` installed, the easiest way of getting IPython is to simply use :command:`easy_install`::
41
42 $ easy_install ipython
43
44 That's it.
45
46 Installation from source
47 ------------------------
48
49 If you don't want to use :command:`easy_install`, or don't have it installed, just grab the latest stable build of IPython from `here <http://ipython.scipy.org/dist/>`_. Then do the following::
50
51 $ tar -xzf ipython.tar.gz
52 $ cd ipython
53 $ python setup.py install
54
55 If you are installing to a location (like ``/usr/local``) that requires higher permissions, you may need to run the last command with :command:`sudo`.
56
57 Windows
58 -------
59
60 There are a few caveats for Windows users. The main issue is that a basic ``python setup.py install`` approach won't create ``.bat`` files or Start Menu shortcuts, which most users want. To get an installation with these, there are a few choices:
61
62 1. Install using :command:`easy_install`.
63
64 2. Install using our binary ``.exe`` Windows installer, which can be found `here <http://ipython.scipy.org/dist/>`_.
65
66 3. Install from source, but using :mod:`setuptools` (``python setupegg.py install``).
67
68 Installing the development version
69 ----------------------------------
70
71 It is also possible to install the development version of IPython from our `Bazaar <http://bazaar-vcs.org/>`_ source code
72 repository. To do this you will need to have Bazaar installed on your system. Then just do::
73
74 $ bzr branch lp:ipython
75 $ cd ipython
76 $ python setup.py install
77
78 Again, this last step on Windows won't create ``.bat`` files or Start Menu shortcuts, so you will have to use one of the other approaches listed above.
79
80 Some users want to be able to follow the development branch as it changes. If you have :mod:`setuptools` installed, this is easy. Simply replace the last step by::
81
82 $ python setupegg.py develop
83
84 This creates links in the right places and installs the command line script to the appropriate places. Then, if you want to update your IPython at any time, just do::
85
86 $ bzr pull
87
88 Basic optional dependencies
89 ===========================
90
91 There are a number of basic optional dependencies that most users will want to get. These are:
92
93 * readline (for command line editing, tab completion, etc.)
94 * nose (to run the IPython test suite)
95 * pexpect (to use things like irunner)
96
97 If you are comfortable installing these things yourself, have at it, otherwise read on for more details.
98
99 readline
100 --------
101
102 In principle, all Python distributions should come with a working :mod:`readline` module. But, reality is not quite that simple. There are two common situations where you won't have a working :mod:`readline` module:
103
104 * If you are using the built-in Python on Mac OS X.
105
106 * If you are running Windows, which doesn't have a :mod:`readline` module.
107
108 On OS X, the built-in Python doesn't have :mod:`readline` because of license issues. Starting with OS X 10.5 (Leopard), Apple's built-in Python has a BSD-licensed not-quite-compatible readline replacement. As of IPython 0.9, many of the issues related to the differences between readline and libedit have been resolved. For many users, libedit may be sufficient.
109
110 Most users on OS X will want to get the full :mod:`readline` module. To get a working :mod:`readline` module, just do (with :mod:`setuptools` installed)::
111
112 $ easy_install readline
113
114 .. note::
115
116 Other Python distributions on OS X (such as fink, MacPorts and the
117 official python.org binaries) already have readline installed so
118 you don't have to do this step.
119
120 If needed, the readline egg can be built and installed from source (see the wiki page at http://ipython.scipy.org/moin/InstallationOSXLeopard).
121
122 On Windows, you will need the PyReadline module. PyReadline is a separate,
123 Windows only implementation of readline that uses native Windows calls through
124 :mod:`ctypes`. The easiest way of installing PyReadline is to use the binary
125 installer available `here <http://ipython.scipy.org/dist/>`_. The
126 :mod:`ctypes` module, which comes with Python 2.5 and greater, is required by
127 PyReadline. It is available for Python 2.4 at
128 http://python.net/crew/theller/ctypes.
129
130 nose
131 ----
132
133 To run the IPython test suite you will need the :mod:`nose` package. Nose provides a great way of sniffing out and running all of the IPython tests. The simplest way of getting nose is to use :command:`easy_install`::
134
135 $ easy_install nose
136
137 Another way of getting this is to do::
138
139 $ easy_install ipython[test]
140
141 For more installation options, see the `nose website <http://somethingaboutorange.com/mrl/projects/nose/>`_. Once you have nose installed, you can run IPython's test suite using the iptest command::
142
143 $ iptest
144
145
146 pexpect
147 -------
148
149 The `pexpect <http://www.noah.org/wiki/Pexpect>`_ package is used in IPython's :command:`irunner` script. On Unix platforms (including OS X), just do::
150
151 $ easy_install pexpect
152
153 Windows users are out of luck as pexpect does not run there.
154
155 Dependencies for IPython.kernel (parallel computing)
156 ====================================================
157
158 The IPython kernel provides a nice architecture for parallel computing. The main focus of this architecture is on interactive parallel computing. These features require a number of additional packages:
159
160 * zope.interface (yep, we use interfaces)
161 * Twisted (asynchronous networking framework)
162 * Foolscap (a nice, secure network protocol)
163 * pyOpenSSL (security for network connections)
164
165 On a Unix style platform (including OS X), if you want to use :mod:`setuptools`, you can just do::
166
167 $ easy_install ipython[kernel] # the first three
168 $ easy_install ipython[security] # pyOpenSSL
169
170 zope.interface and Twisted
171 --------------------------
172
173 Twisted [Twisted]_ and zope.interface [ZopeInterface]_ are used for networking related things. On Unix
174 style platforms (including OS X), the simplest way of getting these is to
175 use :command:`easy_install`::
176
177 $ easy_install zope.interface
178 $ easy_install Twisted
179
180 Of course, you can also download the source tarballs from the `Twisted website <twistedmatrix.org>`_ and the `zope.interface page at PyPI <http://pypi.python.org/pypi/zope.interface>`_ and do the usual ``python setup.py install`` if you prefer.
181
182 Windows is a bit different. For zope.interface and Twisted, simply get the latest binary ``.exe`` installer from the Twisted website. This installer includes both zope.interface and Twisted and should just work.
183
184 Foolscap
185 --------
186
187 Foolscap [Foolscap]_ uses Twisted to provide a very nice secure RPC protocol that we use to implement our parallel computing features.
188
189 On all platforms a simple::
190
191 $ easy_install foolscap
192
193 should work. You can also download the source tarballs from the `Foolscap website <http://foolscap.lothar.com/trac>`_ and do ``python setup.py install`` if you prefer.
194
195 pyOpenSSL
196 ---------
197
198 IPython requires an older version of pyOpenSSL [pyOpenSSL]_ (0.6 rather than the current 0.7). There are a couple of options for getting this:
199
200 1. Most Linux distributions have packages for pyOpenSSL.
201 2. The built-in Python 2.5 on OS X 10.5 already has it installed.
202 3. There are source tarballs on the pyOpenSSL website. On Unix-like
203 platforms, these can be built using ``python setup.py install``.
204 4. There is also a binary ``.exe`` Windows installer on the `pyOpenSSL website <http://pyopenssl.sourceforge.net/>`_.
205
206 Dependencies for IPython.frontend (the IPython GUI)
207 ===================================================
208
209 wxPython
210 --------
211
212 Starting with IPython 0.9, IPython has a new IPython.frontend package that has a nice wxPython based IPython GUI. As you would expect, this GUI requires wxPython. Most Linux distributions have wxPython packages available and the built-in Python on OS X comes with wxPython preinstalled. For Windows, a binary installer is available on the `wxPython website <http://www.wxpython.org/>`_.
213
214 .. [Twisted] Twisted matrix. http://twistedmatrix.org
215 .. [ZopeInterface] http://pypi.python.org/pypi/zope.interface
216 .. [Foolscap] Foolscap network protocol. http://foolscap.lothar.com/trac
217 .. [pyOpenSSL] pyOpenSSL. http://pyopenssl.sourceforge.net No newline at end of file
@@ -0,0 +1,251
1 .. _parallel_process:
2
3 ===========================================
4 Starting the IPython controller and engines
5 ===========================================
6
7 To use IPython for parallel computing, you need to start one instance of
8 the controller and one or more instances of the engine. The controller
9 and each engine can run on different machines or on the same machine.
10 Because of this, there are many different possibilities.
11
12 Broadly speaking, there are two ways of going about starting a controller and engines:
13
14 * In an automated manner using the :command:`ipcluster` command.
15 * In a more manual way using the :command:`ipcontroller` and
16 :command:`ipengine` commands.
17
18 This document describes both of these methods. We recommend that new users start with the :command:`ipcluster` command as it simplifies many common usage cases.
19
20 General considerations
21 ======================
22
23 Before delving into the details about how you can start a controller and engines using the various methods, we outline some of the general issues that come up when starting the controller and engines. These things come up no matter which method you use to start your IPython cluster.
24
25 Let's say that you want to start the controller on ``host0`` and engines on hosts ``host1``-``hostn``. The following steps are then required:
26
27 1. Start the controller on ``host0`` by running :command:`ipcontroller` on
28 ``host0``.
29 2. Move the FURL file (:file:`ipcontroller-engine.furl`) created by the
30 controller from ``host0`` to hosts ``host1``-``hostn``.
31 3. Start the engines on hosts ``host1``-``hostn`` by running
32 :command:`ipengine`. This command has to be told where the FURL file
33 (:file:`ipcontroller-engine.furl`) is located.
34
35 At this point, the controller and engines will be connected. By default, the
36 FURL files created by the controller are put into the
37 :file:`~/.ipython/security` directory. If the engines share a filesystem with
38 the controller, step 2 can be skipped as the engines will automatically look
39 at that location.
40
41 The final step required to actually use the running controller from a
42 client is to move the FURL files :file:`ipcontroller-mec.furl` and
43 :file:`ipcontroller-tc.furl` from ``host0`` to the host where the clients will
44 be run. If these files are put into the :file:`~/.ipython/security` directory of the client's host, they will be found automatically. Otherwise, the full path to them has to be passed to the client's constructor.
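
For example, a sketch of creating clients from non-default FURL locations (this assumes the client constructors accept the path to the corresponding FURL file)::

    from IPython.kernel import client

    mec = client.MultiEngineClient('/path/to/ipcontroller-mec.furl')
    tc = client.TaskClient('/path/to/ipcontroller-tc.furl')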
45
46 Using :command:`ipcluster`
47 ==========================
48
49 The :command:`ipcluster` command provides a simple way of starting a controller and engines in the following situations:
50
51 1. When the controller and engines are all run on localhost. This is useful
52 for testing or running on a multicore computer.
53 2. When engines are started using the :command:`mpirun` command that comes
54 with most MPI [MPI]_ implementations.
55 3. When engines are started using the PBS [PBS]_ batch system.
56
57 .. note::
58
59 It is also possible for advanced users to add support to
60 :command:`ipcluster` for starting controllers and engines using other
61 methods (like Sun's Grid Engine for example).
62
63 .. note::
64
65 Currently :command:`ipcluster` requires that the
66 :file:`~/.ipython/security` directory live on a shared filesystem that is
67 seen by both the controller and engines. If you don't have a shared file
68 system you will need to use :command:`ipcontroller` and
69 :command:`ipengine` directly.
70
71 Underneath the hood, :command:`ipcluster` just uses :command:`ipcontroller`
72 and :command:`ipengine` to perform the steps described above.
73
74 Using :command:`ipcluster` in local mode
75 ----------------------------------------
76
77 To start one controller and 4 engines on localhost, just do::
78
79 $ ipcluster local -n 4
80
81 To see other command line options for the local mode, do::
82
83 $ ipcluster local -h
84
85 Using :command:`ipcluster` in mpirun mode
86 -----------------------------------------
87
88 The mpirun mode is useful if you:
89
90 1. Have MPI installed.
91 2. Have systems that are configured to use the :command:`mpirun` command to start
92 processes.
93
94 If these are satisfied, you can start an IPython cluster using::
95
96 $ ipcluster mpirun -n 4
97
98 This does the following:
99
100 1. Starts the IPython controller on current host.
101 2. Uses :command:`mpirun` to start 4 engines.
102
103 On newer MPI implementations (such as OpenMPI), this will work even if you don't make any calls to MPI or call :func:`MPI_Init`. However, older MPI implementations actually require each process to call :func:`MPI_Init` upon starting. The easiest way of having this done is to install the mpi4py [mpi4py]_ package and then call ipcluster with the ``--mpi`` option::
104
105 $ ipcluster mpirun -n 4 --mpi=mpi4py
106
107 Unfortunately, even this won't work for some MPI implementations. If you are having problems with this, you will likely have to use a custom Python executable that itself calls :func:`MPI_Init` at the appropriate time. Fortunately, mpi4py comes with such a custom Python executable that is easy to install and use. However, this custom Python executable approach will not work with :command:`ipcluster` currently.
108
109 Additional command line options for this mode can be found by doing::
110
111 $ ipcluster mpirun -h
112
113 More details on using MPI with IPython can be found :ref:`here <parallelmpi>`.
114
115
116 Using :command:`ipcluster` in PBS mode
117 --------------------------------------
118
119 The PBS mode uses the Portable Batch System [PBS]_ to start the engines. To use this mode, you first need to create a PBS script template that will be used to start the engines. Here is a sample PBS script template:
120
121 .. sourcecode:: bash
122
123 #PBS -N ipython
124 #PBS -j oe
125 #PBS -l walltime=00:10:00
126 #PBS -l nodes=${n/4}:ppn=4
127 #PBS -q parallel
128
129 cd $$PBS_O_WORKDIR
130 export PATH=$$HOME/usr/local/bin
131 export PYTHONPATH=$$HOME/usr/local/lib/python2.4/site-packages
132 /usr/local/bin/mpiexec -n ${n} ipengine --logfile=$$PBS_O_WORKDIR/ipengine
133
134 There are a few important points about this template:
135
136 1. This template will be rendered at runtime using IPython's :mod:`Itpl`
137 template engine.
138
139 2. Instead of putting in the actual number of engines, use the notation
140 ``${n}`` to indicate the number of engines to be started. You can also use
141 expressions like ``${n/4}`` in the template to indicate the number of
142 nodes.
143
144 3. Because ``$`` is a special character used by the template engine, you must
145 escape any ``$`` by using ``$$``. This is important when referring to
146 environment variables in the template.
147
148 4. Any options to :command:`ipengine` should be given in the batch script
149 template.
150
151 5. Depending on the configuration of your system, you may have to set
152 environment variables in the script template.
153
154 Once you have created such a script, save it with a name like :file:`pbs.template`. Now you are ready to start your job::
155
156 $ ipcluster pbs -n 128 --pbs-script=pbs.template
157
158 Additional command line options for this mode can be found by doing::
159
160 $ ipcluster pbs -h
161
162 Using the :command:`ipcontroller` and :command:`ipengine` commands
163 ==================================================================
164
165 It is also possible to use the :command:`ipcontroller` and :command:`ipengine` commands to start your controller and engines. This approach gives you full control over all aspects of the startup process.
166
167 Starting the controller and engine on your local machine
168 --------------------------------------------------------
169
170 To use :command:`ipcontroller` and :command:`ipengine` to start things on your
171 local machine, do the following.
172
173 First start the controller::
174
175 $ ipcontroller
176
177 Next, start however many instances of the engine you want using (repeatedly) the command::
178
179 $ ipengine
180
181 The engines should start and automatically connect to the controller using the FURL files in :file:`~/.ipython/security`. You are now ready to use the controller and engines from IPython.
182
183 .. warning::
184
185 The order of the above operations is very important. You *must*
186 start the controller before the engines, since the engines connect
187 to the controller as they get started.
188
189 .. note::
190
191 On some platforms (OS X), to put the controller and engine into the
192 background you may need to give these commands in the form ``(ipcontroller
193 &)`` and ``(ipengine &)`` (with the parentheses) for them to work
194 properly.
195
196 Starting the controller and engines on different hosts
197 ------------------------------------------------------
198
199 When the controller and engines are running on different hosts, things are
200 slightly more complicated, but the underlying ideas are the same:
201
202 1. Start the controller on a host using :command:`ipcontroller`.
203 2. Copy :file:`ipcontroller-engine.furl` from :file:`~/.ipython/security` on the controller's host to the host where the engines will run.
204 3. Use :command:`ipengine` on the engine's hosts to start the engines.
205
206 The only thing you have to be careful of is to tell :command:`ipengine` where the :file:`ipcontroller-engine.furl` file is located. There are two ways you can do this:
207
208 * Put :file:`ipcontroller-engine.furl` in the :file:`~/.ipython/security`
209 directory on the engine's host, where it will be found automatically.
210 * Call :command:`ipengine` with the ``--furl-file=full_path_to_the_file``
211 flag.
212
213 The ``--furl-file`` flag works like this::
214
215 $ ipengine --furl-file=/path/to/my/ipcontroller-engine.furl
216
217 .. note::
218
219 If the controller's and engine's hosts all have a shared file system
220 (:file:`~/.ipython/security` is the same on all of them), then things
221 will just work!
222
223 Make FURL files persistent
224 ---------------------------
225
226 At first glance it may seem that managing the FURL files is a bit annoying. Going back to the house and key analogy, copying the FURL around each time you start the controller is like having to make a new key every time you want to unlock the door and enter your house. As with your house, you want to be able to create the key (or FURL file) once, and then simply use it at any point in the future.
227
228 This is possible. The only thing you have to do is decide what ports the controller will listen on for the engines and clients. This is done as follows::
229
230 $ ipcontroller -r --client-port=10101 --engine-port=10102
231
232 Then, just copy the FURL files over the first time and you are set. You can start and stop the controller and engines as many times as you want in the future; just make sure to tell the controller to use the *same* ports.
233
234 .. note::
235
236 You may ask the question: what ports does the controller listen on if you
237 don't tell it to use specific ones? The default is to use high random port
238 numbers. We do this for two reasons: i) to increase security through
239 obscurity and ii) to allow multiple controllers on a given host to start and
240 automatically use different ports.
241
242 Log files
243 ---------
244
245 All of the components of IPython have log files associated with them.
246 These log files can be extremely useful in debugging problems with
247 IPython and can be found in the directory :file:`~/.ipython/log`. Sending
248 the log files to us will often help us to debug any problems.
249
250
251 .. [PBS] Portable Batch System. http://www.openpbs.org/
@@ -0,0 +1,363
1 .. _parallelsecurity:
2
3 ===========================
4 Security details of IPython
5 ===========================
6
7 IPython's :mod:`IPython.kernel` package exposes the full power of the Python
8 interpreter over a TCP/IP network for the purposes of parallel computing. This
9 feature brings up the important question of IPython's security model. This
10 document gives details about this model and how it is implemented in IPython's
11 architecture.
12
13 Processes and network topology
14 ==============================
15
16 To enable parallel computing, IPython has a number of different processes that
17 run. These processes are discussed at length in the IPython documentation and
18 are summarized here:
19
20 * The IPython *engine*. This process is a full blown Python
21 interpreter in which user code is executed. Multiple
22 engines are started to make parallel computing possible.
23 * The IPython *controller*. This process manages a set of
24 engines, maintaining a queue for each and presenting
25 an asynchronous interface to the set of engines.
26 * The IPython *client*. This process is typically an
27 interactive Python process that is used to coordinate the
28 engines to get a parallel computation done.
29
30 Collectively, these three processes are called the IPython *kernel*.
31
32 These three processes communicate over TCP/IP connections with a well defined
33 topology. The IPython controller is the only process that listens on TCP/IP
34 sockets. Upon starting, an engine connects to a controller and registers
35 itself with the controller. These engine/controller TCP/IP connections persist
36 for the lifetime of each engine.
37
38 The IPython client also connects to the controller using one or more TCP/IP
39 connections. These connections persist for the lifetime of the client only.
40
41 A given IPython controller and set of engines typically has a relatively short
42 lifetime. Usually, this lifetime corresponds to the duration of a single
43 parallel simulation performed by a single user. Finally, the controller,
44 engines and client processes typically execute with the permissions of that
45 same user. More specifically, the controller and engines are *not* executed as
46 root or with any other superuser permissions.
47
48 Application logic
49 =================
50
51 When running the IPython kernel to perform a parallel computation, a user
52 utilizes the IPython client to send Python commands and data through the
53 IPython controller to the IPython engines, where those commands are executed
54 and the data processed. The design of IPython ensures that the client is the
55 only access point for the capabilities of the engines. That is, the only way of addressing the engines is through a client.
56
57 A user can utilize the client to instruct the IPython engines to execute
58 arbitrary Python commands. These Python commands can include calls to the
59 system shell, access the filesystem, etc., as required by the user's
60 application code. From this perspective, when a user runs an IPython engine on
61 a host, that engine has the same capabilities and permissions as the user
62 themselves (as if they were logged onto the engine's host with a terminal).
63
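As a minimal sketch of this workflow (the class and method names below follow
the multi-engine client interface of :mod:`IPython.kernel`, but the exact
spelling should be treated as illustrative and may vary between versions)::

    from IPython.kernel import client

    # The client finds the appropriate FURL file automatically when it is in
    # the default location (~/.ipython/security); a path can also be given.
    mec = client.MultiEngineClient()

    mec.execute('import os')   # run arbitrary Python code on all engines
    mec.push(dict(a=10))       # send data to the engines
    print mec.pull('a')        # retrieve data from the engines
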
64 Secure network connections
65 ==========================
66
67 Overview
68 --------
69
70 All TCP/IP connections between the client and controller as well as the
71 engines and controller are fully encrypted and authenticated. This section
72 describes the details of the encryption and authentication approaches used
73 within IPython.
74
75 IPython uses the Foolscap network protocol [Foolscap]_ for all communications
76 between processes. Thus, the details of IPython's security model are directly
77 related to those of Foolscap. Consequently, much of the following discussion is
78 actually just a discussion of the security that is built into Foolscap.
79
80 Encryption
81 ----------
82
83 For encryption purposes, IPython and Foolscap use the well known Secure Socket
84 Layer (SSL) protocol [RFC5246]_. We use the implementation of this protocol
85 provided by the OpenSSL project through the pyOpenSSL [pyOpenSSL]_ Python
86 bindings to OpenSSL.
87
88 Authentication
89 --------------
90
91 IPython clients and engines must also authenticate themselves with the
92 controller. This is handled in a capabilities based security model
93 [Capability]_. In this model, the controller creates a strong cryptographic
94 key or token that represents each set of capabilities that the controller
95 offers. Any party who has this key and presents it to the controller has full
96 access to the corresponding capabilities of the controller. This model is
97 analogous to using a physical key to gain access to physical items
98 (capabilities) behind a locked door.
99
100 For a capabilities based authentication system to prevent unauthorized access,
101 two things must be ensured:
102
103 * The keys must be cryptographically strong. Otherwise attackers could gain
104 access by a simple brute force key guessing attack.
105 * The actual keys must be distributed only to authorized parties.
106
107 The keys in Foolscap are called Foolscap URLs, or FURLs. The following section
108 gives details about how these FURLs are created in Foolscap. The IPython
109 controller creates a number of FURLs for different purposes:
110
111 * One FURL that grants IPython engines access to the controller. Also
112 implicit in this access is permission to execute code sent by an
113 authenticated IPython client.
114 * Two or more FURLs that grant IPython clients access to the controller.
115 Implicit in this access is permission to give code to the controller's
116 engines for execution.
117
118 Upon starting, the controller creates these different FURLs and writes them
119 to files in the user-read-only directory :file:`$HOME/.ipython/security`. Thus, only the
120 user who starts the controller has access to the FURLs.
121
122 For an IPython client or engine to authenticate with a controller, it must
123 present the appropriate FURL to the controller upon connecting. If the
124 FURL matches what the controller expects for a given capability, access is
125 granted. If not, access is denied. The exchange of FURLs is done after
126 encrypted communications channels have been established to prevent attackers
127 from capturing them.
128
129 .. note::
130
131 The FURL is similar to an unsigned private key in SSH.
132
133 Details of the Foolscap handshake
134 ---------------------------------
135
136 In this section we detail the precise security handshake that takes place at
137 the beginning of any network connection in IPython. For the purposes of this
138 discussion, the SERVER is the IPython controller process and the CLIENT is the
139 IPython engine or client process.
140
141 Upon starting, all IPython processes do the following:
142
143 1. Create a public key x509 certificate (ISO/IEC 9594).
144 2. Create a hash of the contents of the certificate using the SHA-1 algorithm.
145 The base-32 encoded version of this hash is saved by the process as its
146 process id (actually in Foolscap, this is the Tub id, but here we refer to
147 it as the process id). A sketch of this derivation is given below.
148
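The following is a small, purely illustrative sketch of steps 1-2 above. The
helper name and the exact byte representation of the certificate that is
hashed are assumptions; Foolscap's actual implementation may differ in
detail::

    import base64
    import hashlib

    def tub_id_sketch(cert_bytes):
        # SHA-1 hash of the certificate contents (step 2 above)
        digest = hashlib.sha1(cert_bytes).digest()
        # base-32 encode the hash; this string serves as the process (Tub) id
        return base64.b32encode(digest).lower()
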
149 Upon starting, the IPython controller also does the following:
150
151 1. Save the x509 certificate to disk in a secure location. The CLIENT
152 certificate is never saved to disk.
153 2. Create a FURL for each capability that the controller has. There are
154 separate capabilities the controller offers for clients and engines. The
155 FURL is created using: a) the process id of the SERVER, b) the IP
156 address and port the SERVER is listening on and c) a 160 bit,
157 cryptographically secure string that represents the capability (the
158 "capability id").
159 3. The FURLs are saved to disk in a secure location on the SERVER's host.
160
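Putting these three ingredients together, a FURL has roughly the following
form (this is an illustrative sketch with placeholder fields, not a real
FURL; the exact syntax is defined by Foolscap)::

    pb://<process id>@<ip address>:<port>/<capability id>
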
161 For a CLIENT to be able to connect to the SERVER and access a capability of
162 that SERVER, the CLIENT must have knowledge of the FURL for that SERVER's
163 capability. This typically requires that the file containing the FURL be
164 moved from the SERVER's host to the CLIENT's host. This is done by the end
165 user who started the SERVER and wishes to have a CLIENT connect to the SERVER.
166
167 When a CLIENT connects to the SERVER, the following handshake protocol takes
168 place:
169
170 1. The CLIENT tells the SERVER what process (or Tub) id it expects the SERVER
171 to have.
172 2. If the SERVER has that process id, it notifies the CLIENT that it will now
173 enter encrypted mode. If the SERVER has a different id, the SERVER aborts.
174 3. Both CLIENT and SERVER initiate the SSL handshake protocol.
175 4. Both CLIENT and SERVER request the certificate of their peer and verify
176 that certificate. If this succeeds, all further communications are
177 encrypted.
178 5. Both CLIENT and SERVER send a hello block containing connection parameters
179 and their process id.
180 6. The CLIENT and SERVER check that their peer's stated process id matches the
181 hash of the x509 certificate the peer presented. If not, the connection is
182 aborted.
183 7. The CLIENT verifies that the SERVER's stated id matches the id of the
184 SERVER the CLIENT is intending to connect to. If not, the connection is
185 aborted.
186 8. The CLIENT and SERVER elect a master who decides on the final connection
187 parameters.
188
189 The public/private key pair associated with each process's x509 certificate
190 is completely hidden from this handshake protocol. It is, however, used
191 internally by OpenSSL as part of the SSL handshake protocol. Each process
192 keeps its own private key hidden and sends its peer only the public key
193 (embedded in the certificate).
194
195 Finally, when the CLIENT requests access to a particular SERVER capability,
196 the following happens:
197
198 1. The CLIENT asks the SERVER for access to a capability by presenting that
199 capability's id.
200 2. If the SERVER has a capability with that id, access is granted. If not,
201 access is not granted.
202 3. Once access has been gained, the CLIENT can use the capability.
203
204 Specific security vulnerabilities
205 =================================
206
207 There are a number of potential security vulnerabilities present in IPython's
208 architecture. In this section we discuss those vulnerabilities and detail how
209 the security architecture described above prevents them from being exploited.
210
211 Unauthorized clients
212 --------------------
213
214 The IPython client can instruct the IPython engines to execute arbitrary
215 Python code with the permissions of the user who started the engines. If an
216 attacker were able to connect their own hostile IPython client to the IPython
217 controller, they could instruct the engines to execute code.
218
219 This attack is prevented by the capabilities based client authentication
220 performed after the encrypted channel has been established. The relevant
221 authentication information is encoded into the FURL that clients must
222 present to gain access to the IPython controller. By limiting the distribution
223 of those FURLs, a user can grant access to only authorized persons.
224
225 It is highly unlikely that a client FURL could be guessed by an attacker
226 in a brute force guessing attack. A given instance of the IPython controller
227 only runs for a relatively short amount of time (on the order of hours). Thus
228 an attacker would have only a limited amount of time to test a search space of
229 size 2**320. Furthermore, even if a controller were to run for a longer amount
230 of time, this search space is quite large (larger, for instance, than that of
231 a typical username/password pair). The rough calculation below illustrates the scale.
232
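As a rough, back-of-the-envelope illustration (the guessing rate and running
time are assumed values, chosen only to show the orders of magnitude
involved)::

    10 hours x 3600 s/hour x 10**9 guesses/s = 3.6 x 10**13 guesses (about 2**45)

    2**45 / 2**320 = 2**-275

Even at this implausibly high guessing rate, an attacker would cover only a
vanishingly small fraction of the key space.
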
233 Unauthorized engines
234 --------------------
235
236 If an attacker were able to connect a hostile engine to a user's controller,
237 the user might unknowingly send sensitive code or data to the hostile engine.
238 This attacker's engine would then have full access to that code and data.
239
240 This type of attack is prevented in the same way as the unauthorized client
241 attack, through the usage of the capabilities based authentication scheme.
242
243 Unauthorized controllers
244 ------------------------
245
246 It is also possible that an attacker could try to convince a user's IPython
247 client or engine to connect to a hostile IPython controller. That controller
248 would then have full access to the code and data sent between the IPython
249 client and the IPython engines.
250
251 Again, this attack is prevented through the FURLs, which ensure that a
252 client or engine connects to the correct controller. It is also important to
253 note that the FURLs also encode the IP address and port that the
254 controller is listening on, so there is little chance of mistakenly connecting
255 to a controller running on a different IP address and port.
256
257 When starting an engine or client, a user must specify which FURL to use
258 for that connection. Thus, in order to introduce a hostile controller, the
259 attacker must convince the user to use the FURLs associated with the
260 hostile controller. As long as a user is diligent in only using FURLs from
261 trusted sources, this attack is not possible.
262
263 Other security measures
264 =======================
265
266 A number of other measures are taken to further limit the security risks
267 involved in running the IPython kernel.
268
269 First, by default, the IPython controller listens on random port numbers.
270 While this can be overridden by the user, in the default configuration, an
271 attacker would have to do a port scan to even find a controller to attack.
272 When coupled with the relatively short running time of a typical controller
273 (on the order of hours), an attacker would have to work extremely hard and
274 extremely *fast* to even find a running controller to attack.
275
276 Second, much of the time, especially when run on supercomputers or clusters,
277 the controller is running behind a firewall. Thus, for engines or clients to
278 connect to the controller:
279
280 * The different processes all have to be behind the firewall.
281
282 or:
283
284 * The user has to use SSH port forwarding to tunnel the
285 connections through the firewall (a sketch of such a tunnel is given below).
286
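For example, forwarding the two fixed ports used in the earlier example with
SSH might look roughly like the following (the host names and user are
placeholders, and additional steps, such as making sure the address in the
FURL resolves correctly, may be required)::

    $ ssh -f -N -L 10101:controller-host:10101 -L 10102:controller-host:10102 user@gateway-host
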
287 In either case, an attacker is presented with additional barriers that prevent
288 attacking or even probing the system.
289
290 Summary
291 =======
292
293 IPython's architecture has been carefully designed with security in mind. The
294 capabilities based authentication model, in conjunction with the encrypted
295 TCP/IP channels, addresses the core potential vulnerabilities in the system,
296 while still enabling users to use the system in open networks.
297
298 Other questions
299 ===============
300
301 About keys
302 ----------
303
304 Can you clarify the roles of the certificate and its keys versus the FURL,
305 which is also called a key?
306
307 The certificate created by IPython processes is a standard public key x509
308 certificate that is used by the SSL handshake protocol to set up an encrypted
309 channel between the controller and the IPython engine or client. The public
310 and private keys associated with this certificate are used only by the SSL
311 handshake protocol in setting up this encrypted channel.
312
313 The FURL serves a completely different and independent purpose from the
314 key pair associated with the certificate. When we refer to a FURL as a
315 key, we are using the word "key" in the capabilities based security model
316 sense. This has nothing to do with "key" in the public/private key sense used
317 in the SSL protocol.
318
319 With that said, the FURL is used as a cryptographic key to grant
320 IPython engines and clients access to particular capabilities that the
321 controller offers.
322
323 Self signed certificates
324 ------------------------
325
326 Is the controller creating a self-signed certificate? Is this created per
327 instance/session, as a one-time setup, or each time the controller is started?
328
329 The Foolscap network protocol, which handles the SSL protocol details, creates
330 a self-signed x509 certificate using OpenSSL for each IPython process. The
331 lifetime of the certificate is handled differently for the IPython controller
332 and the engines/client.
333
334 For the IPython engines and client, the certificate is only held in memory for
335 the lifetime of its process. It is never written to disk.
336
337 For the controller, the certificate can be created anew each time the
338 controller starts or it can be created once and reused each time the
339 controller starts. If at any point, the certificate is deleted, a new one is
340 created the next time the controller starts.
341
342 SSL private key
343 ---------------
344
345 How is the private key (associated with the certificate) distributed?
346
347 In the usual implementation of the SSL protocol, the private key is never
348 distributed. We always follow this standard practice.
349
350 SSL versus Foolscap authentication
351 ----------------------------------
352
353 Many SSL connections only perform one-sided authentication (the server to the
354 client). How is the client authentication in IPython's system related to SSL
355 authentication?
356
357 We perform a two way SSL handshake in which both parties request and verify
358 the certificate of their peer. This mutual authentication is handled by the
359 SSL handshake and is separate and independent from the additional
360 authentication steps that the CLIENT and SERVER perform after an encrypted
361 channel is established.
362
363 .. [RFC5246] <http://tools.ietf.org/html/rfc5246>
@@ -0,0 +1,423
1 """
2 Defines a docutils directive for inserting inheritance diagrams.
3
4 Provide the directive with one or more classes or modules (separated
5 by whitespace). For modules, all of the classes in that module will
6 be used.
7
8 Example::
9
10 Given the following classes:
11
12 class A: pass
13 class B(A): pass
14 class C(A): pass
15 class D(B, C): pass
16 class E(B): pass
17
18 .. inheritance-diagram:: D E
19
20 Produces a graph like the following:
21
22 A
23 / \
24 B C
25 / \ /
26 E D
27
28 The graph is inserted as a PNG+image map into HTML and a PDF in
29 LaTeX.
30 """
31
32 import inspect
33 import os
34 import re
35 import subprocess
36 try:
37 from hashlib import md5
38 except ImportError:
39 from md5 import md5
40
41 from docutils.nodes import Body, Element
42 from docutils.writers.html4css1 import HTMLTranslator
43 from sphinx.latexwriter import LaTeXTranslator
44 from docutils.parsers.rst import directives
45 from sphinx.roles import xfileref_role
46
47 class DotException(Exception):
48 pass
49
50 class InheritanceGraph(object):
51 """
52 Given a list of classes, determines the set of classes that
53 they inherit from all the way to the root "object", and then
54 is able to generate a graphviz dot graph from them.
55 """
56 def __init__(self, class_names, show_builtins=False):
57 """
58 *class_names* is a list of child classes to show bases from.
59
60 If *show_builtins* is True, then Python builtins will be shown
61 in the graph.
62 """
63 self.class_names = class_names
64 self.classes = self._import_classes(class_names)
65 self.all_classes = self._all_classes(self.classes)
66 if len(self.all_classes) == 0:
67 raise ValueError("No classes found for inheritance diagram")
68 self.show_builtins = show_builtins
69
70 py_sig_re = re.compile(r'''^([\w.]*\.)? # class names
71 (\w+) \s* $ # optionally arguments
72 ''', re.VERBOSE)
73
74 def _import_class_or_module(self, name):
75 """
76 Import a class using its fully-qualified *name*.
77 """
78 try:
79 path, base = self.py_sig_re.match(name).groups()
80 except:
81 raise ValueError(
82 "Invalid class or module '%s' specified for inheritance diagram" % name)
83 fullname = (path or '') + base
84 path = (path and path.rstrip('.'))
85 if not path:
86 path = base
87 if not path:
88 raise ValueError(
89 "Invalid class or module '%s' specified for inheritance diagram" % name)
90 try:
91 module = __import__(path, None, None, [])
92 except ImportError:
93 raise ValueError(
94 "Could not import class or module '%s' specified for inheritance diagram" % name)
95
96 try:
97 todoc = module
98 for comp in fullname.split('.')[1:]:
99 todoc = getattr(todoc, comp)
100 except AttributeError:
101 raise ValueError(
102 "Could not find class or module '%s' specified for inheritance diagram" % name)
103
104 # If a class, just return it
105 if inspect.isclass(todoc):
106 return [todoc]
107 elif inspect.ismodule(todoc):
108 classes = []
109 for cls in todoc.__dict__.values():
110 if inspect.isclass(cls) and cls.__module__ == todoc.__name__:
111 classes.append(cls)
112 return classes
113 raise ValueError(
114 "'%s' does not resolve to a class or module" % name)
115
116 def _import_classes(self, class_names):
117 """
118 Import a list of classes.
119 """
120 classes = []
121 for name in class_names:
122 classes.extend(self._import_class_or_module(name))
123 return classes
124
125 def _all_classes(self, classes):
126 """
127 Return a list of all classes that are ancestors of *classes*.
128 """
129 all_classes = {}
130
131 def recurse(cls):
132 all_classes[cls] = None
133 for c in cls.__bases__:
134 if c not in all_classes:
135 recurse(c)
136
137 for cls in classes:
138 recurse(cls)
139
140 return all_classes.keys()
141
142 def class_name(self, cls, parts=0):
143 """
144 Given a class object, return a fully-qualified name. This
145 works for things I've tested in matplotlib so far, but may not
146 be completely general.
147 """
148 module = cls.__module__
149 if module == '__builtin__':
150 fullname = cls.__name__
151 else:
152 fullname = "%s.%s" % (module, cls.__name__)
153 if parts == 0:
154 return fullname
155 name_parts = fullname.split('.')
156 return '.'.join(name_parts[-parts:])
157
158 def get_all_class_names(self):
159 """
160 Get all of the class names involved in the graph.
161 """
162 return [self.class_name(x) for x in self.all_classes]
163
164 # These are the default options for graphviz
165 default_graph_options = {
166 "rankdir": "LR",
167 "size": '"8.0, 12.0"'
168 }
169 default_node_options = {
170 "shape": "box",
171 "fontsize": 10,
172 "height": 0.25,
173 "fontname": "Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",
174 "style": '"setlinewidth(0.5)"'
175 }
176 default_edge_options = {
177 "arrowsize": 0.5,
178 "style": '"setlinewidth(0.5)"'
179 }
180
181 def _format_node_options(self, options):
182 return ','.join(["%s=%s" % x for x in options.items()])
183 def _format_graph_options(self, options):
184 return ''.join(["%s=%s;\n" % x for x in options.items()])
185
186 def generate_dot(self, fd, name, parts=0, urls={},
187 graph_options={}, node_options={},
188 edge_options={}):
189 """
190 Generate a graphviz dot graph from the classes that
191 were passed in to __init__.
192
193 *fd* is a Python file-like object to write to.
194
195 *name* is the name of the graph
196
197 *urls* is a dictionary mapping class names to http urls
198
199 *graph_options*, *node_options*, *edge_options* are
200 dictionaries containing key/value pairs to pass on as graphviz
201 properties.
202 """
203 g_options = self.default_graph_options.copy()
204 g_options.update(graph_options)
205 n_options = self.default_node_options.copy()
206 n_options.update(node_options)
207 e_options = self.default_edge_options.copy()
208 e_options.update(edge_options)
209
210 fd.write('digraph %s {\n' % name)
211 fd.write(self._format_graph_options(g_options))
212
213 for cls in self.all_classes:
214 if not self.show_builtins and cls in __builtins__.values():
215 continue
216
217 name = self.class_name(cls, parts)
218
219 # Write the node
220 this_node_options = n_options.copy()
221 url = urls.get(self.class_name(cls))
222 if url is not None:
223 this_node_options['URL'] = '"%s"' % url
224 fd.write(' "%s" [%s];\n' %
225 (name, self._format_node_options(this_node_options)))
226
227 # Write the edges
228 for base in cls.__bases__:
229 if not self.show_builtins and base in __builtins__.values():
230 continue
231
232 base_name = self.class_name(base, parts)
233 fd.write(' "%s" -> "%s" [%s];\n' %
234 (base_name, name,
235 self._format_node_options(e_options)))
236 fd.write('}\n')
237
238 def run_dot(self, args, name, parts=0, urls={},
239 graph_options={}, node_options={}, edge_options={}):
240 """
241 Run graphviz 'dot' over this graph, returning whatever 'dot'
242 writes to stdout.
243
244 *args* will be passed along as commandline arguments.
245
246 *name* is the name of the graph
247
248 *urls* is a dictionary mapping class names to http urls
249
250 Raises DotException for any of the many os and
251 installation-related errors that may occur.
252 """
253 try:
254 dot = subprocess.Popen(['dot'] + list(args),
255 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
256 close_fds=True)
257 except OSError:
258 raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?")
259 except ValueError:
260 raise DotException("'dot' called with invalid arguments")
261 except:
262 raise DotException("Unexpected error calling 'dot'")
263
264 self.generate_dot(dot.stdin, name, parts, urls, graph_options,
265 node_options, edge_options)
266 dot.stdin.close()
267 result = dot.stdout.read()
268 returncode = dot.wait()
269 if returncode != 0:
270 raise DotException("'dot' returned the errorcode %d" % returncode)
271 return result
272
273 class inheritance_diagram(Body, Element):
274 """
275 A docutils node to use as a placeholder for the inheritance
276 diagram.
277 """
278 pass
279
280 def inheritance_diagram_directive_run(class_names, options, state):
281 """
282 Run when the inheritance_diagram directive is first encountered.
283 """
284 node = inheritance_diagram()
285
286 # Create a graph starting with the list of classes
287 graph = InheritanceGraph(class_names)
288
289 # Create xref nodes for each target of the graph's image map and
290 # add them to the doc tree so that Sphinx can resolve the
291 # references to real URLs later. These nodes will eventually be
292 # removed from the doctree after we're done with them.
293 for name in graph.get_all_class_names():
294 refnodes, x = xfileref_role(
295 'class', ':class:`%s`' % name, name, 0, state)
296 node.extend(refnodes)
297 # Store the graph object so we can use it to generate the
298 # dot file later
299 node['graph'] = graph
300 # Store the original content for use as a hash
301 node['parts'] = options.get('parts', 0)
302 node['content'] = " ".join(class_names)
303 return [node]
304
305 def get_graph_hash(node):
306 return md5(node['content'] + str(node['parts'])).hexdigest()[-10:]
307
308 def html_output_graph(self, node):
309 """
310 Output the graph for HTML. This will insert a PNG with clickable
311 image map.
312 """
313 graph = node['graph']
314 parts = node['parts']
315
316 graph_hash = get_graph_hash(node)
317 name = "inheritance%s" % graph_hash
318 png_path = os.path.join('_static', name + ".png")
319
320 path = '_static'
321 source = self.document.attributes['source']
322 count = source.split('/doc/')[-1].count('/')
323 for i in range(count):
324 if os.path.exists(path): break
325 path = '../'+path
326 path = '../'+path #specifically added for matplotlib
327
328 # Create a mapping from fully-qualified class names to URLs.
329 urls = {}
330 for child in node:
331 if child.get('refuri') is not None:
332 urls[child['reftitle']] = child.get('refuri')
333 elif child.get('refid') is not None:
334 urls[child['reftitle']] = '#' + child.get('refid')
335
336 # These arguments to dot will save a PNG file to disk and write
337 # an HTML image map to stdout.
338 image_map = graph.run_dot(['-Tpng', '-o%s' % png_path, '-Tcmapx'],
339 name, parts, urls)
340 return ('<img src="%s/%s.png" usemap="#%s" class="inheritance"/>%s' %
341 (path, name, name, image_map))
342
343 def latex_output_graph(self, node):
344 """
345 Output the graph for LaTeX. This will insert a PDF.
346 """
347 graph = node['graph']
348 parts = node['parts']
349
350 graph_hash = get_graph_hash(node)
351 name = "inheritance%s" % graph_hash
352 pdf_path = os.path.join('_static', name + ".pdf")
353
354 graph.run_dot(['-Tpdf', '-o%s' % pdf_path],
355 name, parts, graph_options={'size': '"6.0,6.0"'})
356 return '\\includegraphics{../../%s}' % pdf_path
357
358 def visit_inheritance_diagram(inner_func):
359 """
360 This is just a wrapper around html/latex_output_graph to make it
361 easier to handle errors and insert warnings.
362 """
363 def visitor(self, node):
364 try:
365 content = inner_func(self, node)
366 except DotException, e:
367 # Insert the exception as a warning in the document
368 warning = self.document.reporter.warning(str(e), line=node.line)
369 warning.parent = node
370 node.children = [warning]
371 else:
372 source = self.document.attributes['source']
373 self.body.append(content)
374 node.children = []
375 return visitor
376
377 def do_nothing(self, node):
378 pass
379
380 options_spec = {
381 'parts': directives.nonnegative_int
382 }
383
384 # Deal with the old and new way of registering directives
385 try:
386 from docutils.parsers.rst import Directive
387 except ImportError:
388 from docutils.parsers.rst.directives import _directives
389 def inheritance_diagram_directive(name, arguments, options, content, lineno,
390 content_offset, block_text, state,
391 state_machine):
392 return inheritance_diagram_directive_run(arguments, options, state)
393 inheritance_diagram_directive.__doc__ = __doc__
394 inheritance_diagram_directive.arguments = (1, 100, 0)
395 inheritance_diagram_directive.options = options_spec
396 inheritance_diagram_directive.content = 0
397 _directives['inheritance-diagram'] = inheritance_diagram_directive
398 else:
399 class inheritance_diagram_directive(Directive):
400 has_content = False
401 required_arguments = 1
402 optional_arguments = 100
403 final_argument_whitespace = False
404 option_spec = options_spec
405
406 def run(self):
407 return inheritance_diagram_directive_run(
408 self.arguments, self.options, self.state)
409 inheritance_diagram_directive.__doc__ = __doc__
410
411 directives.register_directive('inheritance-diagram',
412 inheritance_diagram_directive)
413
414 def setup(app):
415 app.add_node(inheritance_diagram)
416
417 HTMLTranslator.visit_inheritance_diagram = \
418 visit_inheritance_diagram(html_output_graph)
419 HTMLTranslator.depart_inheritance_diagram = do_nothing
420
421 LaTeXTranslator.visit_inheritance_diagram = \
422 visit_inheritance_diagram(latex_output_graph)
423 LaTeXTranslator.depart_inheritance_diagram = do_nothing
@@ -0,0 +1,75
1 from pygments.lexer import Lexer, do_insertions
2 from pygments.lexers.agile import PythonConsoleLexer, PythonLexer, \
3 PythonTracebackLexer
4 from pygments.token import Comment, Generic
5 from sphinx import highlighting
6 import re
7
8 line_re = re.compile('.*?\n')
9
10 class IPythonConsoleLexer(Lexer):
11 """
12 For IPython console output or doctests, such as:
13
14 Tracebacks are not currently supported.
15
16 .. sourcecode:: ipython
17
18 In [1]: a = 'foo'
19
20 In [2]: a
21 Out[2]: 'foo'
22
23 In [3]: print a
24 foo
25
26 In [4]: 1 / 0
27 """
28 name = 'IPython console session'
29 aliases = ['ipython']
30 mimetypes = ['text/x-ipython-console']
31 input_prompt = re.compile("(In \[[0-9]+\]: )|( \.\.\.+:)")
32 output_prompt = re.compile("(Out\[[0-9]+\]: )|( \.\.\.+:)")
33 continue_prompt = re.compile(" \.\.\.+:")
34 tb_start = re.compile("\-+")
35
36 def get_tokens_unprocessed(self, text):
37 pylexer = PythonLexer(**self.options)
38 tblexer = PythonTracebackLexer(**self.options)
39
40 curcode = ''
41 insertions = []
42 for match in line_re.finditer(text):
43 line = match.group()
44 input_prompt = self.input_prompt.match(line)
45 continue_prompt = self.continue_prompt.match(line.rstrip())
46 output_prompt = self.output_prompt.match(line)
47 if line.startswith("#"):
48 insertions.append((len(curcode),
49 [(0, Comment, line)]))
50 elif input_prompt is not None:
51 insertions.append((len(curcode),
52 [(0, Generic.Prompt, input_prompt.group())]))
53 curcode += line[input_prompt.end():]
54 elif continue_prompt is not None:
55 insertions.append((len(curcode),
56 [(0, Generic.Prompt, continue_prompt.group())]))
57 curcode += line[continue_prompt.end():]
58 elif output_prompt is not None:
59 insertions.append((len(curcode),
60 [(0, Generic.Output, output_prompt.group())]))
61 curcode += line[output_prompt.end():]
62 else:
63 if curcode:
64 for item in do_insertions(insertions,
65 pylexer.get_tokens_unprocessed(curcode)):
66 yield item
67 curcode = ''
68 insertions = []
69 yield match.start(), Generic.Output, line
70 if curcode:
71 for item in do_insertions(insertions,
72 pylexer.get_tokens_unprocessed(curcode)):
73 yield item
74
75 highlighting.lexers['ipython'] = IPythonConsoleLexer()
@@ -0,0 +1,87
1 #
2 # A pair of directives for inserting content that will only appear in
3 # either html or latex.
4 #
5
6 from docutils.nodes import Body, Element
7 from docutils.writers.html4css1 import HTMLTranslator
8 from sphinx.latexwriter import LaTeXTranslator
9 from docutils.parsers.rst import directives
10
11 class html_only(Body, Element):
12 pass
13
14 class latex_only(Body, Element):
15 pass
16
17 def run(content, node_class, state, content_offset):
18 text = '\n'.join(content)
19 node = node_class(text)
20 state.nested_parse(content, content_offset, node)
21 return [node]
22
23 try:
24 from docutils.parsers.rst import Directive
25 except ImportError:
26 from docutils.parsers.rst.directives import _directives
27
28 def html_only_directive(name, arguments, options, content, lineno,
29 content_offset, block_text, state, state_machine):
30 return run(content, html_only, state, content_offset)
31
32 def latex_only_directive(name, arguments, options, content, lineno,
33 content_offset, block_text, state, state_machine):
34 return run(content, latex_only, state, content_offset)
35
36 for func in (html_only_directive, latex_only_directive):
37 func.content = 1
38 func.options = {}
39 func.arguments = None
40
41 _directives['htmlonly'] = html_only_directive
42 _directives['latexonly'] = latex_only_directive
43 else:
44 class OnlyDirective(Directive):
45 has_content = True
46 required_arguments = 0
47 optional_arguments = 0
48 final_argument_whitespace = True
49 option_spec = {}
50
51 def run(self):
52 self.assert_has_content()
53 return run(self.content, self.node_class,
54 self.state, self.content_offset)
55
56 class HtmlOnlyDirective(OnlyDirective):
57 node_class = html_only
58
59 class LatexOnlyDirective(OnlyDirective):
60 node_class = latex_only
61
62 directives.register_directive('htmlonly', HtmlOnlyDirective)
63 directives.register_directive('latexonly', LatexOnlyDirective)
64
65 def setup(app):
66 app.add_node(html_only)
67 app.add_node(latex_only)
68
69 # Add visit/depart methods to HTML-Translator:
70 def visit_perform(self, node):
71 pass
72 def depart_perform(self, node):
73 pass
74 def visit_ignore(self, node):
75 node.children = []
76 def depart_ignore(self, node):
77 node.children = []
78
79 HTMLTranslator.visit_html_only = visit_perform
80 HTMLTranslator.depart_html_only = depart_perform
81 HTMLTranslator.visit_latex_only = visit_ignore
82 HTMLTranslator.depart_latex_only = depart_ignore
83
84 LaTeXTranslator.visit_html_only = visit_ignore
85 LaTeXTranslator.depart_html_only = depart_ignore
86 LaTeXTranslator.visit_latex_only = visit_perform
87 LaTeXTranslator.depart_latex_only = depart_perform
@@ -0,0 +1,155
1 """A special directive for including a matplotlib plot.
2
3 Given a path to a .py file, it includes the source code inline, then:
4
5 - On HTML, will include a .png with a link to a high-res .png.
6
7 - On LaTeX, will include a .pdf
8
9 This directive supports all of the options of the `image` directive,
10 except for `target` (since plot will add its own target).
11
12 Additionally, if the :include-source: option is provided, the literal
13 source will be included inline, as well as a link to the source.
14 """
15
16 import sys, os, glob, shutil
17 from docutils.parsers.rst import directives
18
19 try:
20 # docutils 0.4
21 from docutils.parsers.rst.directives.images import align
22 except ImportError:
23 # docutils 0.5
24 from docutils.parsers.rst.directives.images import Image
25 align = Image.align
26
27
28 import matplotlib
29 import IPython.Shell
30 matplotlib.use('Agg')
31 import matplotlib.pyplot as plt
32
33 mplshell = IPython.Shell.MatplotlibShell('mpl')
34
35 options = {'alt': directives.unchanged,
36 'height': directives.length_or_unitless,
37 'width': directives.length_or_percentage_or_unitless,
38 'scale': directives.nonnegative_int,
39 'align': align,
40 'class': directives.class_option,
41 'include-source': directives.flag }
42
43 template = """
44 .. htmlonly::
45
46 [`source code <../%(srcdir)s/%(basename)s.py>`__,
47 `png <../%(srcdir)s/%(basename)s.hires.png>`__,
48 `pdf <../%(srcdir)s/%(basename)s.pdf>`__]
49
50 .. image:: ../%(srcdir)s/%(basename)s.png
51 %(options)s
52
53 .. latexonly::
54 .. image:: ../%(srcdir)s/%(basename)s.pdf
55 %(options)s
56
57 """
58
59 def makefig(fullpath, outdir):
60 """
61 run a pyplot script and save the low and high res PNGs and a PDF in _static
62 """
63
64 fullpath = str(fullpath) # todo, why is unicode breaking this
65 formats = [('png', 100),
66 ('hires.png', 200),
67 ('pdf', 72),
68 ]
69
70 basedir, fname = os.path.split(fullpath)
71 basename, ext = os.path.splitext(fname)
72 all_exists = True
73
74 if basedir != outdir:
75 shutil.copyfile(fullpath, os.path.join(outdir, fname))
76
77 for format, dpi in formats:
78 outname = os.path.join(outdir, '%s.%s' % (basename, format))
79 if not os.path.exists(outname):
80 all_exists = False
81 break
82
83 if all_exists:
84 print ' already have %s'%fullpath
85 return
86
87 print ' building %s'%fullpath
88 plt.close('all') # we need to clear between runs
89 matplotlib.rcdefaults()
90
91 mplshell.magic_run(fullpath)
92 for format, dpi in formats:
93 outname = os.path.join(outdir, '%s.%s' % (basename, format))
94 if os.path.exists(outname): continue
95 plt.savefig(outname, dpi=dpi)
96
97 def run(arguments, options, state_machine, lineno):
98 reference = directives.uri(arguments[0])
99 basedir, fname = os.path.split(reference)
100 basename, ext = os.path.splitext(fname)
101
102 # todo - should we be using the _static dir for the outdir, I am
103 # not sure we want to corrupt that dir with autogenerated files
104 # since it also has permanent files in it which makes it difficult
105 # to clean (save an rm -rf followed by an svn up)
106 srcdir = 'pyplots'
107
108 makefig(os.path.join(srcdir, reference), srcdir)
109
110 # todo: it is not great design to assume the makefile is putting
111 # the figs into the right place, so we may want to do that here instead.
112
113 if options.has_key('include-source'):
114 lines = ['.. literalinclude:: ../pyplots/%(reference)s' % locals()]
115 del options['include-source']
116 else:
117 lines = []
118
119 options = [' :%s: %s' % (key, val) for key, val in
120 options.items()]
121 options = "\n".join(options)
122
123 lines.extend((template % locals()).split('\n'))
124
125 state_machine.insert_input(
126 lines, state_machine.input_lines.source(0))
127 return []
128
129
130 try:
131 from docutils.parsers.rst import Directive
132 except ImportError:
133 from docutils.parsers.rst.directives import _directives
134
135 def plot_directive(name, arguments, options, content, lineno,
136 content_offset, block_text, state, state_machine):
137 return run(arguments, options, state_machine, lineno)
138 plot_directive.__doc__ = __doc__
139 plot_directive.arguments = (1, 0, 1)
140 plot_directive.options = options
141
142 _directives['plot'] = plot_directive
143 else:
144 class plot_directive(Directive):
145 required_arguments = 1
146 optional_arguments = 0
147 final_argument_whitespace = True
148 option_spec = options
149 def run(self):
150 return run(self.arguments, self.options,
151 self.state_machine, self.lineno)
152 plot_directive.__doc__ = __doc__
153
154 directives.register_directive('plot', plot_directive)
155
@@ -0,0 +1,172
1 #!/usr/bin/env python
2 """A parallel tasking tool that uses asynchronous programming. This uses
3 blocking client to get taskid, but returns a Deferred as the result of
4 run(). Users should attach their callbacks on these Deferreds.
5
6 Only returning of results is asynchronous. Submitting tasks and getting task
7 ids are done synchronously.
8
9 Yichun Wei 03/2008
10 """
11
12 import inspect
13 import itertools
14 import numpy as N
15
16 from twisted.python import log
17 from ipython1.kernel import client
18 from ipython1.kernel.client import Task
19
20 """ After http://trac.pocoo.org/repos/pocoo/trunk/pocoo/utils/decorators.py
21 """
22 class submit_job(object):
23 """ a decorator factory: takes a MultiEngineClient and a TaskClient, returns a
24 decorator, that makes a call to the decorated func as a task in ipython1
25 and submit it to IPython1 controller:
26 """
27 def __init__(self, rc, tc):
28 self.rc = rc
29 self.tc = tc
30
31 def __call__(self, func):
32 return self._decorate(func)
33
34 def _getinfo(self, func):
35 assert inspect.ismethod(func) or inspect.isfunction(func)
36 regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
37 argnames = list(regargs)
38 if varargs:
39 argnames.append(varargs)
40 if varkwargs:
41 argnames.append(varkwargs)
42 counter = itertools.count()
43 fullsign = inspect.formatargspec(
44 regargs, varargs, varkwargs, defaults,
45 formatvalue=lambda value: '=defarg[%i]' % counter.next())[1:-1]
46 shortsign = inspect.formatargspec(
47 regargs, varargs, varkwargs, defaults,
48 formatvalue=lambda value: '')[1:-1]
49 dic = dict(('arg%s' % n, name) for n, name in enumerate(argnames))
50 dic.update(name=func.__name__, argnames=argnames, shortsign=shortsign,
51 fullsign = fullsign, defarg = func.func_defaults or ())
52 return dic
53
54 def _decorate(self, func):
55 """
56 Takes a function and a remote controller and returns a function
57 decorated that is going to submit the job with the controller.
58 The decorated function is obtained by evaluating a lambda
59 function with the correct signature.
60
61 the TaskController setupNS doesn't cope with functions, but we
62 can use RemoteController to push functions/modules into engines.
63
64 Changes:
65 200803. In new ipython1, we use push_function for functions.
66 """
67 rc, tc = self.rc, self.tc
68 infodict = self._getinfo(func)
69 if 'rc' in infodict['argnames']:
70 raise NameError, "You cannot use rc as argument names!"
71
72 # we assume the engines' namespace has been prepared.
73 # ns[func.__name__] is already the decorated closure function.
74 # we need to change it back to the original function:
75 ns = {}
76 ns[func.__name__] = func
77
78 # push func and all its environment/prerequesites to engines
79 rc.push_function(ns, block=True) # note it is non-blocking by default; not sure if that causes problems
80
81 def do_submit_func(*args, **kwds):
82 jobns = {}
83
84 # Initialize job namespace with args that have default args
85 # now we support calls that uses default args
86 for n in infodict['fullsign'].split(','):
87 try:
88 vname, var = n.split('=')
89 vname, var = vname.strip(), var.strip()
90 except: # no defarg, one of vname, var is None
91 pass
92 else:
93 jobns.setdefault(vname, eval(var, infodict))
94
95 # push args and kwds, overwriting default args if needed.
96 nokwds = dict((n,v) for n,v in zip(infodict['argnames'], args)) # truncated
97 jobns.update(nokwds)
98 jobns.update(kwds)
99
100 task = Task('a_very_long_and_rare_name = %(name)s(%(shortsign)s)' % infodict,
101 pull=['a_very_long_and_rare_name'], push=jobns,)
102 jobid = tc.run(task)
103 # res is a deferred, one can attach callbacks on it
104 res = tc.task_controller.get_task_result(jobid, block=True)
105 res.addCallback(lambda x: x.ns['a_very_long_and_rare_name'])
106 res.addErrback(log.err)
107 return res
108
109 do_submit_func.rc = rc
110 do_submit_func.tc = tc
111 return do_submit_func
112
113
114 def parallelized(rc, tc, initstrlist=[]):
115 """ rc - remote controller
116 tc - task controller
117 strlist - a list of str that's being executed on engines.
118 """
119 for cmd in initstrlist:
120 rc.execute(cmd, block=True)
121 return submit_job(rc, tc)
122
123
124 from twisted.internet import defer
125 from numpy import array, nan
126
127 def pmap(func, parr, **kwds):
128 """Run func on every element of parr (array), using the elements
129 as the only one parameter (so you can usually use a dict that
130 wraps many parameters). -> a result array of Deferreds with the
131 same shape. func.tc will be used as the taskclient.
132
133 **kwds are passed on to func, not changed.
134 """
135 assert func.tc
136 tc = func.tc
137
138 def run(p, **kwds):
139 if p:
140 return func(p, **kwds)
141 else:
142 return defer.succeed(nan)
143
144 reslist = [run(p, **kwds).addErrback(log.err) for p in parr.flat]
145 resarr = array(reslist)
146 resarr.shape = parr.shape
147 return resarr
148
149
150 if __name__=='__main__':
151
152 rc = client.MultiEngineClient(client.default_address)
153 tc = client.TaskClient(client.default_task_address)
154
155 # if commenting out the decorator you get a local running version
156 # instantly
157 @parallelized(rc, tc)
158 def f(a, b=1):
159 #from time import sleep
160 #sleep(1)
161 print "a,b=", a,b
162 return a+b
163
164 def showres(x):
165 print 'ans:',x
166
167 res = f(11,5)
168 res.addCallback(showres)
169
170 # this is not necessary in Twisted 8.0
171 from twisted.internet import reactor
172 reactor.run()
@@ -0,0 +1,119
1 import types
2
3 class AttributeBase(object):
4
5 def __get__(self, inst, cls=None):
6 if inst is None:
7 return self
8 try:
9 return inst._attributes[self.name]
10 except KeyError:
11 raise AttributeError("object has no attribute %r" % self.name)
12
13 def __set__(self, inst, value):
14 actualValue = self.validate(inst, self.name, value)
15 inst._attributes[self.name] = actualValue
16
17 def validate(self, inst, name, value):
18 raise NotImplementedError("validate must be implemented by a subclass")
19
20 class NameFinder(type):
21
22 def __new__(cls, name, bases, classdict):
23 attributeList = []
24 for k,v in classdict.iteritems():
25 if isinstance(v, AttributeBase):
26 v.name = k
27 attributeList.append(k)
28 classdict['_attributeList'] = attributeList
29 return type.__new__(cls, name, bases, classdict)
30
31 class HasAttributes(object):
32 __metaclass__ = NameFinder
33
34 def __init__(self):
35 self._attributes = {}
36
37 def getAttributeNames(self):
38 return self._attributeList
39
40 def getAttributesOfType(self, t, default=None):
41 result = {}
42 for a in self._attributeList:
43 if self.__class__.__dict__[a].__class__ == t:
44 try:
45 value = getattr(self, a)
46 except AttributeError:
47 value = None
48 result[a] = value
49 return result
50
51 class TypedAttribute(AttributeBase):
52
53 def validate(self, inst, name, value):
54 if type(value) != self._type:
55 raise TypeError("attribute %s must be of type %s" % (name, self._type))
56 else:
57 return value
58
59 # class Option(TypedAttribute):
60 #
61 # _type = types.IntType
62 #
63 # class Param(TypedAttribute):
64 #
65 # _type = types.FloatType
66 #
67 # class String(TypedAttribute):
68 #
69 # _type = types.StringType
70
71 class TypedSequenceAttribute(AttributeBase):
72
73 def validate(self, inst, name, value):
74 if type(value) != types.TupleType and type(value) != types.ListType:
75 raise TypeError("attribute %s must be a list or tuple" % (name))
76 else:
77 for item in value:
78 if type(item) != self._subtype:
79 raise TypeError("attribute %s must be a list or tuple of items with type %s" % (name, self._subtype))
80 return value
81
82 # class Instance(AttributeBase):
83 #
84 # def __init__(self, cls):
85 # self.cls = cls
86 #
87 # def validate(self, inst, name, value):
88 # if not isinstance(value, self.cls):
89 # raise TypeError("attribute %s must be an instance of class %s" % (name, self.cls))
90 # else:
91 # return value
92
93
94 # class OptVec(TypedSequenceAttribute):
95 #
96 # _subtype = types.IntType
97 #
98 # class PrmVec(TypedSequenceAttribute):
99 #
100 # _subtype = types.FloatType
101 #
102 # class StrVec(TypedSequenceAttribute):
103 #
104 # _subtype = types.StringType
105 #
106 #
107 # class Bar(HasAttributes):
108 #
109 # a = Option()
110 #
111 # class Foo(HasAttributes):
112 #
113 # a = Option()
114 # b = Param()
115 # c = String()
116 # d = OptVec()
117 # e = PrmVec()
118 # f = StrVec()
119 # h = Instance(Bar) No newline at end of file
@@ -0,0 +1,8
1 #!/usr/bin/env python
2 # -*- coding: utf-8 -*-
3 """IPython Test Suite Runner.
4 """
5
6 from IPython.testing import iptest
7
8 iptest.main()
@@ -0,0 +1,20
1 #!/usr/bin/env python
2 """Call the compile script to check that all code we ship compiles correctly.
3 """
4
5 import os
6 import sys
7
8
9 vstr = '.'.join(map(str,sys.version_info[:2]))
10
11 stat = os.system('python %s/lib/python%s/compileall.py .' % (sys.prefix,vstr))
12
13 print
14 if stat:
15 print '*** THERE WAS AN ERROR! ***'
16 print 'See messages above for the actual file that produced it.'
17 else:
18 print 'OK'
19
20 sys.exit(stat)
@@ -1,170 +1,182
1 1 # -*- coding: utf-8 -*-
2 2 """Tools for coloring text in ANSI terminals.
3 3
4 4 $Id: ColorANSI.py 2167 2007-03-21 06:57:50Z fperez $"""
5 5
6 6 #*****************************************************************************
7 7 # Copyright (C) 2002-2006 Fernando Perez. <fperez@colorado.edu>
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #*****************************************************************************
12 12
13 13 from IPython import Release
14 14 __author__ = '%s <%s>' % Release.authors['Fernando']
15 15 __license__ = Release.license
16 16
17 17 __all__ = ['TermColors','InputTermColors','ColorScheme','ColorSchemeTable']
18 18
19 19 import os
20 20
21 21 from IPython.ipstruct import Struct
22 22
23 23 def make_color_table(in_class):
24 24 """Build a set of color attributes in a class.
25 25
26 26 Helper function for building the *TermColors classes."""
27 27
28 28 color_templates = (
29 # Dark colors
29 30 ("Black" , "0;30"),
30 31 ("Red" , "0;31"),
31 32 ("Green" , "0;32"),
32 33 ("Brown" , "0;33"),
33 34 ("Blue" , "0;34"),
34 35 ("Purple" , "0;35"),
35 36 ("Cyan" , "0;36"),
36 37 ("LightGray" , "0;37"),
38 # Light colors
37 39 ("DarkGray" , "1;30"),
38 40 ("LightRed" , "1;31"),
39 41 ("LightGreen" , "1;32"),
40 42 ("Yellow" , "1;33"),
41 43 ("LightBlue" , "1;34"),
42 44 ("LightPurple" , "1;35"),
43 45 ("LightCyan" , "1;36"),
44 ("White" , "1;37"), )
46 ("White" , "1;37"),
47 # Blinking colors. Probably should not be used in anything serious.
48 ("BlinkBlack" , "5;30"),
49 ("BlinkRed" , "5;31"),
50 ("BlinkGreen" , "5;32"),
51 ("BlinkYellow" , "5;33"),
52 ("BlinkBlue" , "5;34"),
53 ("BlinkPurple" , "5;35"),
54 ("BlinkCyan" , "5;36"),
55 ("BlinkLightGray", "5;37"),
56 )
45 57
46 58 for name,value in color_templates:
47 59 setattr(in_class,name,in_class._base % value)
48 60
49 61 class TermColors:
50 62 """Color escape sequences.
51 63
52 64 This class defines the escape sequences for all the standard (ANSI?)
53 65 colors in terminals. Also defines a NoColor escape which is just the null
54 66 string, suitable for defining 'dummy' color schemes in terminals which get
55 67 confused by color escapes.
56 68
57 69 This class should be used as a mixin for building color schemes."""
58 70
59 71 NoColor = '' # for color schemes in color-less terminals.
60 72 Normal = '\033[0m' # Reset normal coloring
61 73 _base = '\033[%sm' # Template for all other colors
62 74
63 75 # Build the actual color table as a set of class attributes:
64 76 make_color_table(TermColors)
65 77
66 78 class InputTermColors:
67 79 """Color escape sequences for input prompts.
68 80
69 81 This class is similar to TermColors, but the escapes are wrapped in \001
70 82 and \002 so that readline can properly know the length of each line and
71 83 can wrap lines accordingly. Use this class for any colored text which
72 84 needs to be used in input prompts, such as in calls to raw_input().
73 85
74 86 This class defines the escape sequences for all the standard (ANSI?)
75 87 colors in terminals. Also defines a NoColor escape which is just the null
76 88 string, suitable for defining 'dummy' color schemes in terminals which get
77 89 confused by color escapes.
78 90
79 91 This class should be used as a mixin for building color schemes."""
80 92
81 93 NoColor = '' # for color schemes in color-less terminals.
82 94
83 95 if os.name == 'nt' and os.environ.get('TERM','dumb') == 'emacs':
84 96 # (X)emacs on W32 gets confused with \001 and \002 so we remove them
85 97 Normal = '\033[0m' # Reset normal coloring
86 98 _base = '\033[%sm' # Template for all other colors
87 99 else:
88 100 Normal = '\001\033[0m\002' # Reset normal coloring
89 101 _base = '\001\033[%sm\002' # Template for all other colors
90 102
91 103 # Build the actual color table as a set of class attributes:
92 104 make_color_table(InputTermColors)
93 105
94 106 class ColorScheme:
95 107 """Generic color scheme class. Just a name and a Struct."""
96 108 def __init__(self,__scheme_name_,colordict=None,**colormap):
97 109 self.name = __scheme_name_
98 110 if colordict is None:
99 111 self.colors = Struct(**colormap)
100 112 else:
101 113 self.colors = Struct(colordict)
102 114
103 115 def copy(self,name=None):
104 116 """Return a full copy of the object, optionally renaming it."""
105 117 if name is None:
106 118 name = self.name
107 119 return ColorScheme(name,self.colors.__dict__)
108 120
109 121 class ColorSchemeTable(dict):
110 122 """General class to handle tables of color schemes.
111 123
112 124 It's basically a dict of color schemes with a couple of shorthand
113 125 attributes and some convenient methods.
114 126
115 127 active_scheme_name -> obvious
116 128 active_colors -> actual color table of the active scheme"""
117 129
118 130 def __init__(self,scheme_list=None,default_scheme=''):
119 131 """Create a table of color schemes.
120 132
121 133 The table can be created empty and manually filled or it can be
122 134 created with a list of valid color schemes AND the specification for
123 135 the default active scheme.
124 136 """
125 137
126 138 # create object attributes to be set later
127 139 self.active_scheme_name = ''
128 140 self.active_colors = None
129 141
130 142 if scheme_list:
131 143 if default_scheme == '':
132 144 raise ValueError,'you must specify the default color scheme'
133 145 for scheme in scheme_list:
134 146 self.add_scheme(scheme)
135 147 self.set_active_scheme(default_scheme)
136 148
137 149 def copy(self):
138 150 """Return full copy of object"""
139 151 return ColorSchemeTable(self.values(),self.active_scheme_name)
140 152
141 153 def add_scheme(self,new_scheme):
142 154 """Add a new color scheme to the table."""
143 155 if not isinstance(new_scheme,ColorScheme):
144 156 raise ValueError,'ColorSchemeTable only accepts ColorScheme instances'
145 157 self[new_scheme.name] = new_scheme
146 158
147 159 def set_active_scheme(self,scheme,case_sensitive=0):
148 160 """Set the currently active scheme.
149 161
150 162 Names are by default compared in a case-insensitive way, but this can
151 163 be changed by setting the parameter case_sensitive to true."""
152 164
153 165 scheme_names = self.keys()
154 166 if case_sensitive:
155 167 valid_schemes = scheme_names
156 168 scheme_test = scheme
157 169 else:
158 170 valid_schemes = [s.lower() for s in scheme_names]
159 171 scheme_test = scheme.lower()
160 172 try:
161 173 scheme_idx = valid_schemes.index(scheme_test)
162 174 except ValueError:
163 175 raise ValueError,'Unrecognized color scheme: ' + scheme + \
164 176 '\nValid schemes: '+str(scheme_names).replace("'', ",'')
165 177 else:
166 178 active = scheme_names[scheme_idx]
167 179 self.active_scheme_name = active
168 180 self.active_colors = self[active].colors
169 181 # Now allow using '' as an index for the current active scheme
170 182 self[''] = self[active]
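A hedged end-to-end sketch of how ColorScheme and ColorSchemeTable are meant to be combined (the scheme names and the 'prompt'/'normal' keys below are invented for illustration):

plain = ColorScheme('NoColor', prompt=TermColors.NoColor, normal=TermColors.NoColor)
vivid = ColorScheme('Linux', prompt=TermColors.Green, normal=TermColors.Normal)

table = ColorSchemeTable([plain, vivid], default_scheme='Linux')
table.set_active_scheme('nocolor')      # lookup is case-insensitive by default
active = table.active_colors            # Struct exposing .prompt and .normal
# table[''] is now an alias for the active scheme as well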
@@ -1,394 +1,400
1 1
2 2 """ Implementations for various useful completers
3 3
4 4 See Extensions/ipy_stock_completers.py for examples of how to enable a completer,
5 5 but the basic idea is to do:
6 6
7 7 ip.set_hook('complete_command', svn_completer, str_key = 'svn')
8 8
9 9 """
10 10 import IPython.ipapi
11 11 import glob,os,shlex,sys
12 12 import inspect
13 13 from time import time
14 14 from zipimport import zipimporter
15 15 ip = IPython.ipapi.get()
16 16
17 17 try:
18 18 set
19 19 except:
20 20 from sets import Set as set
21 21
22 22 TIMEOUT_STORAGE = 3 #Time in seconds after which the rootmodules will be stored
23 23 TIMEOUT_GIVEUP = 20 #Time in seconds after which we give up
24 24
25 25 def quick_completer(cmd, completions):
26 26 """ Easily create a trivial completer for a command.
27 27
28 28 Takes either a list of completions, or all completions as a single string
29 29 (which will be split on whitespace).
30 30
31 31 Example::
32 32
33 33 [d:\ipython]|1> import ipy_completers
34 34 [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz'])
35 35 [d:\ipython]|3> foo b<TAB>
36 36 bar baz
37 37 [d:\ipython]|3> foo ba
38 38 """
39 39 if isinstance(completions, basestring):
40 40
41 41 completions = completions.split()
42 42 def do_complete(self,event):
43 43 return completions
44 44
45 45 ip.set_hook('complete_command',do_complete, str_key = cmd)
46 46
47 47 def getRootModules():
48 48 """
49 49 Returns a list containing the names of all the modules available in the
50 50 folders of the pythonpath.
51 51 """
52 52 modules = []
53 53 if ip.db.has_key('rootmodules'):
54 54 return ip.db['rootmodules']
55 55 t = time()
56 56 store = False
57 57 for path in sys.path:
58 58 modules += moduleList(path)
59 59 if time() - t >= TIMEOUT_STORAGE and not store:
60 60 store = True
61 61 print "\nCaching the list of root modules, please wait!"
62 62 print "(This will only be done once - type '%rehashx' to " + \
63 63 "reset cache!)"
64 64 print
65 65 if time() - t > TIMEOUT_GIVEUP:
66 66 print "This is taking too long, we give up."
67 67 print
68 68 ip.db['rootmodules'] = []
69 69 return []
70 70
71 71 modules += sys.builtin_module_names
72 72
73 73 modules = list(set(modules))
74 74 if '__init__' in modules:
75 75 modules.remove('__init__')
76 76 modules = list(set(modules))
77 77 if store:
78 78 ip.db['rootmodules'] = modules
79 79 return modules
80 80
81 81 def moduleList(path):
82 82 """
83 83 Return the list containing the names of the modules available in the given
84 84 folder.
85 85 """
86 86
87 87 if os.path.isdir(path):
88 88 folder_list = os.listdir(path)
89 89 elif path.endswith('.egg'):
90 90 try:
91 91 folder_list = [f for f in zipimporter(path)._files]
92 92 except:
93 93 folder_list = []
94 94 else:
95 95 folder_list = []
96 96 #folder_list = glob.glob(os.path.join(path,'*'))
97 97 folder_list = [p for p in folder_list \
98 98 if os.path.exists(os.path.join(path, p,'__init__.py'))\
99 99 or p[-3:] in ('.py','.so')\
100 100 or p[-4:] in ('.pyc','.pyo','.pyd')]
101 101
102 102 folder_list = [os.path.basename(p).split('.')[0] for p in folder_list]
103 103 return folder_list
104 104
105 105 def moduleCompletion(line):
106 106 """
107 107 Returns a list containing the completion possibilities for an import line.
108 108 The line looks like this:
109 109 'import xml.d'
110 110 'from xml.dom import'
111 111 """
112 112 def tryImport(mod, only_modules=False):
113 113 def isImportable(module, attr):
114 114 if only_modules:
115 115 return inspect.ismodule(getattr(module, attr))
116 116 else:
117 117 return not(attr[:2] == '__' and attr[-2:] == '__')
118 118 try:
119 119 m = __import__(mod)
120 120 except:
121 121 return []
122 122 mods = mod.split('.')
123 123 for module in mods[1:]:
124 124 m = getattr(m,module)
125 125 if (not hasattr(m, '__file__')) or (not only_modules) or\
126 126 (hasattr(m, '__file__') and '__init__' in m.__file__):
127 127 completion_list = [attr for attr in dir(m) if isImportable(m, attr)]
128 128 completion_list.extend(getattr(m,'__all__',[]))
129 129 if hasattr(m, '__file__') and '__init__' in m.__file__:
130 130 completion_list.extend(moduleList(os.path.dirname(m.__file__)))
131 131 completion_list = list(set(completion_list))
132 132 if '__init__' in completion_list:
133 133 completion_list.remove('__init__')
134 134 return completion_list
135 135
136 136 words = line.split(' ')
137 137 if len(words) == 3 and words[0] == 'from':
138 138 return ['import ']
139 139 if len(words) < 3 and (words[0] in ['import','from']) :
140 140 if len(words) == 1:
141 141 return getRootModules()
142 142 mod = words[1].split('.')
143 143 if len(mod) < 2:
144 144 return getRootModules()
145 145 completion_list = tryImport('.'.join(mod[:-1]), True)
146 146 completion_list = ['.'.join(mod[:-1] + [el]) for el in completion_list]
147 147 return completion_list
148 148 if len(words) >= 3 and words[0] == 'from':
149 149 mod = words[1]
150 150 return tryImport(mod)
151 151
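Some illustrative calls (outputs depend on the local installation, so the results shown are only indicative):

# moduleCompletion('import xm')            -> root modules, e.g. ['xml', ...]
# moduleCompletion('import xml.d')         -> ['xml.dom']
# moduleCompletion('from xml.dom import')  -> ['import ']
# moduleCompletion('from xml.dom import ') -> names importable from xml.dom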
152 152 def vcs_completer(commands, event):
153 153 """ utility to make writing typical version control app completers easier
154 154
155 155 VCS command line apps typically have the format:
156 156
157 157 [sudo ]PROGNAME [help] [command] file file...
158 158
159 159 """
160 160
161 161
162 162 cmd_param = event.line.split()
163 163 if event.line.endswith(' '):
164 164 cmd_param.append('')
165 165
166 166 if cmd_param[0] == 'sudo':
167 167 cmd_param = cmd_param[1:]
168 168
169 169 if len(cmd_param) == 2 or 'help' in cmd_param:
170 170 return commands.split()
171 171
172 172 return ip.IP.Completer.file_matches(event.symbol)
173 173
174 174
175 175 pkg_cache = None
176 176
177 177 def module_completer(self,event):
178 178 """ Give completions after user has typed 'import ...' or 'from ...'"""
179 179
180 180 # This works in all versions of python. While 2.5 has
181 181 # pkgutil.walk_packages(), that particular routine is fairly dangerous,
182 182 # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full
183 183 # of possibly problematic side effects.
184 184 # This searches the folders in sys.path for available modules.
185 185
186 186 return moduleCompletion(event.line)
187 187
188 188
189 189 svn_commands = """\
190 190 add blame praise annotate ann cat checkout co cleanup commit ci copy
191 191 cp delete del remove rm diff di export help ? h import info list ls
192 192 lock log merge mkdir move mv rename ren propdel pdel pd propedit pedit
193 193 pe propget pget pg proplist plist pl propset pset ps resolved revert
194 194 status stat st switch sw unlock update
195 195 """
196 196
197 197 def svn_completer(self,event):
198 198 return vcs_completer(svn_commands, event)
199 199
200 200
201 201 hg_commands = """
202 202 add addremove annotate archive backout branch branches bundle cat
203 203 clone commit copy diff export grep heads help identify import incoming
204 204 init locate log manifest merge outgoing parents paths pull push
205 205 qapplied qclone qcommit qdelete qdiff qfold qguard qheader qimport
206 206 qinit qnew qnext qpop qprev qpush qrefresh qrename qrestore qsave
207 207 qselect qseries qtop qunapplied recover remove rename revert rollback
208 208 root serve showconfig status strip tag tags tip unbundle update verify
209 209 version
210 210 """
211 211
212 212 def hg_completer(self,event):
213 213 """ Completer for mercurial commands """
214 214
215 215 return vcs_completer(hg_commands, event)
216 216
217 217
218 218
219 219 __bzr_commands = None
220 220
221 221 def bzr_commands():
222 222 global __bzr_commands
223 223 if __bzr_commands is not None:
224 224 return __bzr_commands
225 225 out = os.popen('bzr help commands')
226 226 __bzr_commands = [l.split()[0] for l in out]
227 227 return __bzr_commands
228 228
229 229 def bzr_completer(self,event):
230 230 """ Completer for bazaar commands """
231 231 cmd_param = event.line.split()
232 232 if event.line.endswith(' '):
233 233 cmd_param.append('')
234 234
235 235 if len(cmd_param) > 2:
236 236 cmd = cmd_param[1]
237 237 param = cmd_param[-1]
238 238 output_file = (param == '--output=')
239 239 if cmd == 'help':
240 240 return bzr_commands()
241 241 elif cmd in ['bundle-revisions','conflicts',
242 242 'deleted','nick','register-branch',
243 243 'serve','unbind','upgrade','version',
244 244 'whoami'] and not output_file:
245 245 return []
246 246 else:
247 247 # the rest are probably file names
248 248 return ip.IP.Completer.file_matches(event.symbol)
249 249
250 250 return bzr_commands()
251 251
252 252
253 253 def shlex_split(x):
254 254 """Helper function to split lines into segments."""
255 255 #shlex.split raises an exception if there is a syntax error in sh syntax,
256 256 #for example if no closing " is found. This function keeps dropping
257 257 #the last character of the line until shlex.split no longer raises an
258 258 #exception, then appends the dropped tail of the line to the result.
259 259 #example: %run "c:/python -> ['%run','"c:/python']
260 260 endofline=[]
261 261 while x!="":
262 262 try:
263 263 comps=shlex.split(x)
264 264 if len(endofline)>=1:
265 265 comps.append("".join(endofline))
266 266 return comps
267 267 except ValueError:
268 268 endofline=[x[-1:]]+endofline
269 269 x=x[:-1]
270 270 return ["".join(endofline)]
271 271
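Two hedged examples of the intended behaviour:

# shlex_split('ls -l "some dir"')  -> ['ls', '-l', 'some dir']
# shlex_split('%run "c:/python')   -> ['%run', '"c:/python']   (unterminated quote kept verbatim)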
272 272 def runlistpy(self, event):
273 273 comps = shlex_split(event.line)
274 274 relpath = (len(comps) > 1 and comps[-1] or '').strip("'\"")
275 275
276 276 #print "\nev=",event # dbg
277 277 #print "rp=",relpath # dbg
278 278 #print 'comps=',comps # dbg
279 279
280 280 lglob = glob.glob
281 281 isdir = os.path.isdir
282 282 if relpath.startswith('~'):
283 283 relpath = os.path.expanduser(relpath)
284 284 dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*')
285 285 if isdir(f)]
286 286
287 287 # Find if the user has already typed the first filename, after which we
288 288 # should complete on all files, since after the first one other files may
289 289 # be arguments to the input script.
290 290 #filter(
291 291 if filter(lambda f: f.endswith('.py') or f.endswith('.ipy') or
292 292 f.endswith('.pyw'),comps):
293 293 pys = [f.replace('\\','/') for f in lglob('*')]
294 294 else:
295 295 pys = [f.replace('\\','/')
296 296 for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') +
297 297 lglob(relpath + '*.pyw')]
298 298 return dirs + pys
299 299
300 300
301 301 greedy_cd_completer = False
302 302
303 303 def cd_completer(self, event):
304 304 relpath = event.symbol
305 305 #print event # dbg
306 306 if '-b' in event.line:
307 307 # return only bookmark completions
308 308 bkms = self.db.get('bookmarks',{})
309 309 return bkms.keys()
310 310
311 311
312 312 if event.symbol == '-':
313 313 width_dh = str(len(str(len(ip.user_ns['_dh']) + 1)))
314 314 # jump in directory history by number
315 315 fmt = '-%0' + width_dh +'d [%s]'
316 316 ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])]
317 317 if len(ents) > 1:
318 318 return ents
319 319 return []
320 320
321 321 if event.symbol.startswith('--'):
322 322 return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']]
323 323
324 324 if relpath.startswith('~'):
325 325 relpath = os.path.expanduser(relpath).replace('\\','/')
326 326 found = []
327 327 for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*')
328 328 if os.path.isdir(f)]:
329 329 if ' ' in d:
330 330 # we don't want to deal with any of that; the complex code
331 331 # for this lives elsewhere
332 332 raise IPython.ipapi.TryNext
333 333 found.append( d )
334 334
335 335 if not found:
336 336 if os.path.isdir(relpath):
337 337 return [relpath]
338 # if no completions so far, try bookmarks
339 bks = self.db.get('bookmarks',{}).keys()
340 bkmatches = [s for s in bks if s.startswith(event.symbol)]
341 if bkmatches:
342 return bkmatches
343
338 344 raise IPython.ipapi.TryNext
339 345
340 346
341 347 def single_dir_expand(matches):
342 348 "Recursively expand match lists containing a single dir."
343 349
344 350 if len(matches) == 1 and os.path.isdir(matches[0]):
345 351 # Takes care of links to directories also. Use '/'
346 352 # explicitly, even under Windows, so that name completions
347 353 # don't end up escaped.
348 354 d = matches[0]
349 355 if d[-1] in ['/','\\']:
350 356 d = d[:-1]
351 357
352 358 subdirs = [p for p in os.listdir(d) if os.path.isdir( d + '/' + p) and not p.startswith('.')]
353 359 if subdirs:
354 360 matches = [ (d + '/' + p) for p in subdirs ]
355 361 return single_dir_expand(matches)
356 362 else:
357 363 return matches
358 364 else:
359 365 return matches
360 366
361 367 if greedy_cd_completer:
362 368 return single_dir_expand(found)
363 369 else:
364 370 return found
365 371
366 372 def apt_get_packages(prefix):
367 373 out = os.popen('apt-cache pkgnames')
368 374 for p in out:
369 375 if p.startswith(prefix):
370 376 yield p.rstrip()
371 377
372 378
373 379 apt_commands = """\
374 380 update upgrade install remove purge source build-dep dist-upgrade
375 381 dselect-upgrade clean autoclean check"""
376 382
377 383 def apt_completer(self, event):
378 384 """ Completer for apt-get (uses apt-cache internally)
379 385
380 386 """
381 387
382 388
383 389 cmd_param = event.line.split()
384 390 if event.line.endswith(' '):
385 391 cmd_param.append('')
386 392
387 393 if cmd_param[0] == 'sudo':
388 394 cmd_param = cmd_param[1:]
389 395
390 396 if len(cmd_param) == 2 or 'help' in cmd_param:
391 397 return apt_commands.split()
392 398
393 399 return list(apt_get_packages(event.symbol))
394 400
@@ -1,84 +1,88
1 1 """ 'editor' hooks for common editors that work well with ipython
2 2
3 3 They should honor the line number argument, at least.
4 4
5 5 Contributions are *very* welcome.
6 6 """
7 7
8 8 import IPython.ipapi
9 9 ip = IPython.ipapi.get()
10 10
11 11 from IPython.Itpl import itplns
12 12 import os
13 13
14 14 def install_editor(run_template, wait = False):
15 15 """ Gets a template in the format "myeditor bah bah $file bah bah $line"
16 16
17 17 $file will be replaced by file name, $line by line number (or 0).
18 18 Installs the editor that is called by IPython, instead of the default
19 19 notepad or vi.
20 20
21 21 If wait is true, wait until the user presses enter before returning,
22 22 to facilitate non-blocking editors that exit immediately after
23 23 the call.
24 24 """
25 25
26 26 def call_editor(self, file, line=0):
27 27 if line is None:
28 28 line = 0
29 29 cmd = itplns(run_template, locals())
30 30 print ">",cmd
31 os.system(cmd)
31 if os.system(cmd) != 0:
32 raise IPython.ipapi.TryNext()
32 33 if wait:
33 34 raw_input("Press Enter when done editing:")
34 35
35 36 ip.set_hook('editor',call_editor)
36 37
37 38
38 39 # in these, exe is always the path/name of the executable. Useful
39 40 # if you don't have the editor directory in your path
40 41
41 42 def komodo(exe = 'komodo'):
42 43 """ Activestate Komodo [Edit] """
43 44 install_editor(exe + ' -l $line "$file"', wait = True)
44 45
45 46 def scite(exe = "scite"):
46 47 """ SciTE or Sc1 """
47 48 install_editor(exe + ' "$file" -goto:$line')
48 49
49 50 def notepadplusplus(exe = 'notepad++'):
50 51 """ Notepad++ http://notepad-plus.sourceforge.net """
51 52 install_editor(exe + ' -n$line "$file"')
52 53
53 54 def jed(exe = 'jed'):
54 55 """ JED, the lightweight emacsish editor """
55 56 install_editor(exe + ' +$line "$file"')
56 57
57 58 def idle(exe = None):
58 59 """ Idle, the editor bundled with python
59 60
60 61 Should be pretty smart about finding the executable.
61 62 """
62 63 if exe is None:
63 64 import idlelib
64 65 p = os.path.dirname(idlelib.__file__)
65 66 exe = p + '/idle.py'
66 67 install_editor(exe + ' "$file"')
67
68
69 def mate(exe = 'mate'):
70 """ TextMate, the missing editor"""
71 install_editor(exe + ' -w -l $line "$file"')
68 72
69 73 # these are untested, report any problems
70 74
71 75 def emacs(exe = 'emacs'):
72 76 install_editor(exe + ' +$line "$file"')
73 77
74 78 def gnuclient(exe= 'gnuclient'):
75 79 install_editor(exe + ' -nw +$line "$file"')
76 80
77 81 def crimson_editor(exe = 'cedt.exe'):
78 82 install_editor(exe + ' /L:$line "$file"')
79 83
80 84 def kate(exe = 'kate'):
81 85 install_editor(exe + ' -u -l $line "$file"')
82 86
83 87
84 88 No newline at end of file
@@ -1,258 +1,270
1 1 """Shell mode for IPython.
2 2
3 3 Start ipython in shell mode by invoking "ipython -p sh"
4 4
5 5 (the old version, "ipython -p pysh" still works but this is the more "modern"
6 6 shell mode and is recommended for users who don't care about pysh-mode
7 7 compatibility)
8 8 """
9 9
10 10 from IPython import ipapi
11 import os,textwrap
11 import os,re,textwrap
12 12
13 13 # The import below effectively obsoletes your old-style ipythonrc[.ini],
14 14 # so consider yourself warned!
15 15
16 16 import ipy_defaults
17 17
18 18 def main():
19 19 ip = ipapi.get()
20 20 o = ip.options
21 21 # autocall to "full" mode (smart mode is default, I like full mode)
22 22
23 23 o.autocall = 2
24 24
25 25 # Jason Orendorff's path class is handy to have in user namespace
26 26 # if you are doing shell-like stuff
27 27 try:
28 28 ip.ex("from IPython.external.path import path" )
29 29 except ImportError:
30 30 pass
31 31
32 32 # beefed up %env is handy in shell mode
33 33 import envpersist
34 34
35 35 # To see where mycmd resides (in path/aliases), do %which mycmd
36 36 import ipy_which
37 37
38 38 # tab completers for hg, svn, ...
39 39 import ipy_app_completers
40 40
41 41 # To make executables foo and bar in mybin usable without PATH change, do:
42 42 # %rehashdir c:/mybin
43 43 # %store foo
44 44 # %store bar
45 45 import ipy_rehashdir
46 46
47 47 # does not work without subprocess module!
48 48 #import ipy_signals
49 49
50 50 ip.ex('import os')
51 51 ip.ex("def up(): os.chdir('..')")
52 52 ip.user_ns['LA'] = LastArgFinder()
53 # Nice prompt
54 53
55 o.prompt_in1= r'\C_LightBlue[\C_LightCyan\Y2\C_LightBlue]\C_Green|\#> '
54 # You can assign to _prompt_title variable
55 # to provide some extra information for prompt
56 # (e.g. the current mode, host/username...)
57
58 ip.user_ns['_prompt_title'] = ''
59
60 # Nice prompt
61 o.prompt_in1= r'\C_Green${_prompt_title}\C_LightBlue[\C_LightCyan\Y2\C_LightBlue]\C_Green|\#> '
56 62 o.prompt_in2= r'\C_Green|\C_LightGreen\D\C_Green> '
57 63 o.prompt_out= '<\#> '
58 64
59 65 from IPython import Release
60 66
61 67 import sys
62 68 # Non-chatty banner
63 69 o.banner = "IPython %s [on Py %s]\n" % (Release.version,sys.version.split(None,1)[0])
64 70
65 71
66 72 ip.IP.default_option('cd','-q')
67 73 ip.IP.default_option('macro', '-r')
68 74 # If you only rarely want to execute the things you %edit...
69 75 #ip.IP.default_option('edit','-x')
70 76
71 77
72 78 o.prompts_pad_left="1"
73 79 # Remove all blank lines in between prompts, like a normal shell.
74 80 o.separate_in="0"
75 81 o.separate_out="0"
76 82 o.separate_out2="0"
77 83
78 84 # now alias all syscommands
79 85
80 86 db = ip.db
81 87
82 88 syscmds = db.get("syscmdlist",[] )
83 89 if not syscmds:
84 90 print textwrap.dedent("""
85 91 System command list not initialized, probably the first run...
86 92 running %rehashx to refresh the command list. Run %rehashx
87 93 again to refresh command list (after installing new software etc.)
88 94 """)
89 95 ip.magic('rehashx')
90 96 syscmds = db.get("syscmdlist")
91 97
92 98 # lowercase aliases on win32 only
93 99 if os.name == 'posix':
94 100 mapper = lambda s:s
95 101 else:
96 102 def mapper(s): return s.lower()
97 103
98 104 for cmd in syscmds:
99 105 # print "sys",cmd #dbg
100 106 noext, ext = os.path.splitext(cmd)
101 key = mapper(noext)
107 if ext.lower() == '.exe':
108 cmd = noext
109
110 key = mapper(cmd)
102 111 if key not in ip.IP.alias_table:
103 ip.defalias(key, cmd)
112 # Dots will be removed from alias names, since ipython
113 # assumes names with dots to be python code
114
115 ip.defalias(key.replace('.',''), cmd)
104 116
105 117 # mglob combines 'find', recursion, exclusion... '%mglob?' to learn more
106 118 ip.load("IPython.external.mglob")
107 119
108 120 # win32 is crippled w/o cygwin, try to help it a little bit
109 121 if sys.platform == 'win32':
110 122 if 'cygwin' in os.environ['PATH'].lower():
111 123 # use the colors of cygwin ls (recommended)
112 124 ip.defalias('d', 'ls -F --color=auto')
113 125 else:
114 126 # get icp, imv, imkdir, igrep, irm,...
115 127 ip.load('ipy_fsops')
116 128
117 129 # and the next best thing to real 'ls -F'
118 130 ip.defalias('d','dir /w /og /on')
119 131
120 ip.set_hook('input_prefilter', dotslash_prefilter_f)
132 ip.set_hook('input_prefilter', slash_prefilter_f)
121 133 extend_shell_behavior(ip)
122 134
123 135 class LastArgFinder:
124 136 """ Allow $LA to work as "last argument of previous command", like !$ in bash
125 137
126 138 To call this in normal IPython code, do LA()
127 139 """
128 140 def __call__(self, hist_idx = None):
129 141 ip = ipapi.get()
130 142 if hist_idx is None:
131 143 return str(self)
132 144 return ip.IP.input_hist_raw[hist_idx].strip().split()[-1]
133 145 def __str__(self):
134 146 ip = ipapi.get()
135 147 for cmd in reversed(ip.IP.input_hist_raw):
136 148 parts = cmd.strip().split()
137 149 if len(parts) < 2 or parts[-1] in ['$LA', 'LA()']:
138 150 continue
139 151 return parts[-1]
140 152 return ""
141 153
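A hypothetical session showing the intent (the paths are made up):

# |1> mkdir /tmp/new_project
# |2> cd $LA              # $LA expands to /tmp/new_project, the last argument above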
142 def dotslash_prefilter_f(self,line):
143 """ ./foo now runs foo as system command
154 def slash_prefilter_f(self,line):
155 """ ./foo, ~/foo and /bin/foo now run foo as a system command
144 156
145 Removes the need for doing !./foo
157 Removes the need for doing !./foo, !~/foo or !/bin/foo
146 158 """
147 159 import IPython.genutils
148 if line.startswith("./"):
160 if re.match('(?:[.~]|/[a-zA-Z_0-9]+)/', line):
149 161 return "_ip.system(" + IPython.genutils.make_quoted_expr(line)+")"
150 162 raise ipapi.TryNext
151 163
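Roughly, the prefilter rewrites matching lines into system calls (the exact quoting comes from make_quoted_expr, so these are indicative only):

# './build.sh --fast'  -> _ip.system(...)   # runs ./build.sh --fast as a shell command
# '~/bin/cleanup'      -> _ip.system(...)
# 'ls -l'              -> no match, ipapi.TryNext is raised and normal processing continues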
152 164 # XXX You do not need to understand the next function!
153 165 # This should probably be moved out of profile
154 166
155 167 def extend_shell_behavior(ip):
156 168
157 169 # Instead of making signature a global variable tie it to IPSHELL.
158 170 # In future if it is required to distinguish between different
159 171 # shells we can assign a signature per shell basis
160 172 ip.IP.__sig__ = 0xa005
161 173 # mark the IPSHELL with this signature
162 174 ip.IP.user_ns['__builtins__'].__dict__['__sig__'] = ip.IP.__sig__
163 175
164 176 from IPython.Itpl import ItplNS
165 177 from IPython.genutils import shell
166 178 # utility to expand user variables via Itpl
167 179 # xxx do something sensible with depth?
168 180 ip.IP.var_expand = lambda cmd, lvars=None, depth=2: \
169 181 str(ItplNS(cmd, ip.IP.user_ns, get_locals()))
170 182
171 183 def get_locals():
172 184 """ Substituting a variable through Itpl deep inside the IPSHELL stack
173 185 requires knowledge of all the variables in scope up to the last
174 186 IPSHELL frame. This routine simply merges all the local variables
175 187 on the IPSHELL stack without worrying about their scope rules
176 188 """
177 189 import sys
178 190 # note a lambda expression constitutes a function call
179 191 # hence fno should be incremented by one
180 192 getsig = lambda fno: sys._getframe(fno+1).f_globals \
181 193 ['__builtins__'].__dict__['__sig__']
182 194 getlvars = lambda fno: sys._getframe(fno+1).f_locals
183 195 # track back until we enter the IPSHELL
184 196 frame_no = 1
185 197 sig = ip.IP.__sig__
186 198 fsig = ~sig
187 199 while fsig != sig :
188 200 try:
189 201 fsig = getsig(frame_no)
190 202 except (AttributeError, KeyError):
191 203 frame_no += 1
192 204 except ValueError:
193 205 # stack is depleted
194 206 # call did not originate from IPSHELL
195 207 return {}
196 208 first_frame = frame_no
197 209 # walk further back until we exit from IPSHELL or deplete stack
198 210 try:
199 211 while(sig == getsig(frame_no+1)):
200 212 frame_no += 1
201 213 except (AttributeError, KeyError, ValueError):
202 214 pass
203 215 # merge the locals from top down hence overriding
204 216 # any re-definitions of variables, functions etc.
205 217 lvars = {}
206 218 for fno in range(frame_no, first_frame-1, -1):
207 219 lvars.update(getlvars(fno))
208 220 #print '\n'*5, first_frame, frame_no, '\n', lvars, '\n'*5 #dbg
209 221 return lvars
210 222
211 223 def _runlines(lines):
212 224 """Run a string of one or more lines of source.
213 225
214 226 This method is capable of running a string containing multiple source
215 227 lines, as if they had been entered at the IPython prompt. Since it
216 228 exposes IPython's processing machinery, the given strings can contain
217 229 magic calls (%magic), special shell access (!cmd), etc."""
218 230
219 231 # We must start with a clean buffer, in case this is run from an
220 232 # interactive IPython session (via a magic, for example).
221 233 ip.IP.resetbuffer()
222 234 lines = lines.split('\n')
223 235 more = 0
224 236 command = ''
225 237 for line in lines:
226 238 # skip blank lines so we don't mess up the prompt counter, but do
227 239 # NOT skip even a blank line if we are in a code block (more is
228 240 # true)
229 241 # if command is not empty trim the line
230 242 if command != '' :
231 243 line = line.strip()
232 244 # add the broken line to the command
233 245 if line and line[-1] == '\\' :
234 246 command += line[0:-1] + ' '
235 247 more = True
236 248 continue
237 249 else :
238 250 # add the last (current) line to the command
239 251 command += line
240 252 if command or more:
241 253 # push to raw history, so hist line numbers stay in sync
242 254 ip.IP.input_hist_raw.append("# " + command + "\n")
243 255
244 256 more = ip.IP.push(ip.IP.prefilter(command,more))
245 257 command = ''
246 258 # IPython's runsource returns None if there was an error
247 259 # compiling the code. This allows us to stop processing right
248 260 # away, so the user gets the error message at the right place.
249 261 if more is None:
250 262 break
251 263 # final newline in case the input didn't have it, so that the code
252 264 # actually does get executed
253 265 if more:
254 266 ip.IP.push('\n')
255 267
256 268 ip.IP.runlines = _runlines
257 269
258 270 main()
1 NO CONTENT: modified file chmod 100755 => 100644
1 NO CONTENT: modified file chmod 100755 => 100644
1 NO CONTENT: modified file chmod 100755 => 100644
1 NO CONTENT: modified file chmod 100755 => 100644
@@ -1,3377 +1,3405
1 1 # -*- coding: utf-8 -*-
2 2 """Magic functions for InteractiveShell.
3 3
4 4 $Id: Magic.py 2996 2008-01-30 06:31:39Z fperez $"""
5 5
6 6 #*****************************************************************************
7 7 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
8 8 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #*****************************************************************************
13 13
14 14 #****************************************************************************
15 15 # Modules and globals
16 16
17 17 from IPython import Release
18 18 __author__ = '%s <%s>\n%s <%s>' % \
19 19 ( Release.authors['Janko'] + Release.authors['Fernando'] )
20 20 __license__ = Release.license
21 21
22 22 # Python standard modules
23 23 import __builtin__
24 24 import bdb
25 25 import inspect
26 26 import os
27 27 import pdb
28 28 import pydoc
29 29 import sys
30 30 import re
31 31 import tempfile
32 32 import time
33 33 import cPickle as pickle
34 34 import textwrap
35 35 from cStringIO import StringIO
36 36 from getopt import getopt,GetoptError
37 37 from pprint import pprint, pformat
38 38 from sets import Set
39 39
40 40 # cProfile was added in Python2.5
41 41 try:
42 42 import cProfile as profile
43 43 import pstats
44 44 except ImportError:
45 45 # profile isn't bundled by default in Debian for license reasons
46 46 try:
47 47 import profile,pstats
48 48 except ImportError:
49 49 profile = pstats = None
50 50
51 51 # Homebrewed
52 52 import IPython
53 53 from IPython import Debugger, OInspect, wildcard
54 54 from IPython.FakeModule import FakeModule
55 55 from IPython.Itpl import Itpl, itpl, printpl,itplns
56 56 from IPython.PyColorize import Parser
57 57 from IPython.ipstruct import Struct
58 58 from IPython.macro import Macro
59 59 from IPython.genutils import *
60 60 from IPython import platutils
61 61 import IPython.generics
62 62 import IPython.ipapi
63 63 from IPython.ipapi import UsageError
64 64 from IPython.testing import decorators as testdec
65 65
66 66 #***************************************************************************
67 67 # Utility functions
68 68 def on_off(tag):
69 69 """Return an ON/OFF string for a 1/0 input. Simple utility function."""
70 70 return ['OFF','ON'][tag]
71 71
72 72 class Bunch: pass
73 73
74 74 def compress_dhist(dh):
75 75 head, tail = dh[:-10], dh[-10:]
76 76
77 77 newhead = []
78 78 done = Set()
79 79 for h in head:
80 80 if h in done:
81 81 continue
82 82 newhead.append(h)
83 83 done.add(h)
84 84
85 85 return newhead + tail
86 86
87 87
88 88 #***************************************************************************
89 89 # Main class implementing Magic functionality
90 90 class Magic:
91 91 """Magic functions for InteractiveShell.
92 92
93 93 Shell functions which can be reached as %function_name. All magic
94 94 functions should accept a string, which they can parse for their own
95 95 needs. This can make some functions easier to type, eg `%cd ../`
96 96 vs. `%cd("../")`
97 97
98 98 ALL definitions MUST begin with the prefix magic_. The user won't need it
99 99 at the command line, but it is needed in the definition. """
100 100
101 101 # class globals
102 102 auto_status = ['Automagic is OFF, % prefix IS needed for magic functions.',
103 103 'Automagic is ON, % prefix NOT needed for magic functions.']
104 104
105 105 #......................................................................
106 106 # some utility functions
107 107
108 108 def __init__(self,shell):
109 109
110 110 self.options_table = {}
111 111 if profile is None:
112 112 self.magic_prun = self.profile_missing_notice
113 113 self.shell = shell
114 114
115 115 # namespace for holding state we may need
116 116 self._magic_state = Bunch()
117 117
118 118 def profile_missing_notice(self, *args, **kwargs):
119 119 error("""\
120 120 The profile module could not be found. It has been removed from the standard
121 121 python packages because of its non-free license. To use profiling, install the
122 122 python-profiler package from non-free.""")
123 123
124 124 def default_option(self,fn,optstr):
125 125 """Make an entry in the options_table for fn, with value optstr"""
126 126
127 127 if fn not in self.lsmagic():
128 128 error("%s is not a magic function" % fn)
129 129 self.options_table[fn] = optstr
130 130
131 131 def lsmagic(self):
132 132 """Return a list of currently available magic functions.
133 133
134 134 Gives a list of the bare names after mangling (['ls','cd', ...], not
135 135 ['magic_ls','magic_cd',...])"""
136 136
137 137 # FIXME. This needs a cleanup, in the way the magics list is built.
138 138
139 139 # magics in class definition
140 140 class_magic = lambda fn: fn.startswith('magic_') and \
141 141 callable(Magic.__dict__[fn])
142 142 # in instance namespace (run-time user additions)
143 143 inst_magic = lambda fn: fn.startswith('magic_') and \
144 144 callable(self.__dict__[fn])
145 145 # and bound magics by user (so they can access self):
146 146 inst_bound_magic = lambda fn: fn.startswith('magic_') and \
147 147 callable(self.__class__.__dict__[fn])
148 148 magics = filter(class_magic,Magic.__dict__.keys()) + \
149 149 filter(inst_magic,self.__dict__.keys()) + \
150 150 filter(inst_bound_magic,self.__class__.__dict__.keys())
151 151 out = []
152 152 for fn in Set(magics):
153 153 out.append(fn.replace('magic_','',1))
154 154 out.sort()
155 155 return out
156 156
157 157 def extract_input_slices(self,slices,raw=False):
158 158 """Return as a string a set of input history slices.
159 159
160 160 Inputs:
161 161
162 162 - slices: the set of slices is given as a list of strings (like
163 163 ['1','4:8','9']), since this function is for use by magic functions
164 164 which get their arguments as strings.
165 165
166 166 Optional inputs:
167 167
168 168 - raw(False): by default, the processed input is used. If this is
169 169 true, the raw input history is used instead.
170 170
171 171 Note that slices can be called with two notations:
172 172
173 173 N:M -> standard python form, means including items N...(M-1).
174 174
175 175 N-M -> include items N..M (closed endpoint)."""
176 176
177 177 if raw:
178 178 hist = self.shell.input_hist_raw
179 179 else:
180 180 hist = self.shell.input_hist
181 181
182 182 cmds = []
183 183 for chunk in slices:
184 184 if ':' in chunk:
185 185 ini,fin = map(int,chunk.split(':'))
186 186 elif '-' in chunk:
187 187 ini,fin = map(int,chunk.split('-'))
188 188 fin += 1
189 189 else:
190 190 ini = int(chunk)
191 191 fin = ini+1
192 192 cmds.append(hist[ini:fin])
193 193 return cmds
194 194
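For example (a hedged illustration; hist here is whichever history list was selected):

# slices=['1','4:8','9'] -> [hist[1:2], hist[4:8], hist[9:10]]   (N:M is half-open)
# slices=['4-8']         -> [hist[4:9]]                          (N-M includes the endpoint)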
195 195 def _ofind(self, oname, namespaces=None):
196 196 """Find an object in the available namespaces.
197 197
198 198 self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic
199 199
200 200 Has special code to detect magic functions.
201 201 """
202 202
203 203 oname = oname.strip()
204 204
205 205 alias_ns = None
206 206 if namespaces is None:
207 207 # Namespaces to search in:
208 208 # Put them in a list. The order is important so that we
209 209 # find things in the same order that Python finds them.
210 210 namespaces = [ ('Interactive', self.shell.user_ns),
211 211 ('IPython internal', self.shell.internal_ns),
212 212 ('Python builtin', __builtin__.__dict__),
213 213 ('Alias', self.shell.alias_table),
214 214 ]
215 215 alias_ns = self.shell.alias_table
216 216
217 217 # initialize results to 'null'
218 218 found = 0; obj = None; ospace = None; ds = None;
219 219 ismagic = 0; isalias = 0; parent = None
220 220
221 221 # Look for the given name by splitting it in parts. If the head is
222 222 # found, then we look for all the remaining parts as members, and only
223 223 # declare success if we can find them all.
224 224 oname_parts = oname.split('.')
225 225 oname_head, oname_rest = oname_parts[0],oname_parts[1:]
226 226 for nsname,ns in namespaces:
227 227 try:
228 228 obj = ns[oname_head]
229 229 except KeyError:
230 230 continue
231 231 else:
232 232 #print 'oname_rest:', oname_rest # dbg
233 233 for part in oname_rest:
234 234 try:
235 235 parent = obj
236 236 obj = getattr(obj,part)
237 237 except:
238 238 # Blanket except b/c some badly implemented objects
239 239 # allow __getattr__ to raise exceptions other than
240 240 # AttributeError, which then crashes IPython.
241 241 break
242 242 else:
243 243 # If we finish the for loop (no break), we got all members
244 244 found = 1
245 245 ospace = nsname
246 246 if ns == alias_ns:
247 247 isalias = 1
248 248 break # namespace loop
249 249
250 250 # Try to see if it's magic
251 251 if not found:
252 252 if oname.startswith(self.shell.ESC_MAGIC):
253 253 oname = oname[1:]
254 254 obj = getattr(self,'magic_'+oname,None)
255 255 if obj is not None:
256 256 found = 1
257 257 ospace = 'IPython internal'
258 258 ismagic = 1
259 259
260 260 # Last try: special-case some literals like '', [], {}, etc:
261 261 if not found and oname_head in ["''",'""','[]','{}','()']:
262 262 obj = eval(oname_head)
263 263 found = 1
264 264 ospace = 'Interactive'
265 265
266 266 return {'found':found, 'obj':obj, 'namespace':ospace,
267 267 'ismagic':ismagic, 'isalias':isalias, 'parent':parent}
268 268
269 269 def arg_err(self,func):
270 270 """Print docstring if incorrect arguments were passed"""
271 271 print 'Error in arguments:'
272 272 print OInspect.getdoc(func)
273 273
274 274 def format_latex(self,strng):
275 275 """Format a string for latex inclusion."""
276 276
277 277 # Characters that need to be escaped for latex:
278 278 escape_re = re.compile(r'(%|_|\$|#|&)',re.MULTILINE)
279 279 # Magic command names as headers:
280 280 cmd_name_re = re.compile(r'^(%s.*?):' % self.shell.ESC_MAGIC,
281 281 re.MULTILINE)
282 282 # Magic commands
283 283 cmd_re = re.compile(r'(?P<cmd>%s.+?\b)(?!\}\}:)' % self.shell.ESC_MAGIC,
284 284 re.MULTILINE)
285 285 # Paragraph continue
286 286 par_re = re.compile(r'\\$',re.MULTILINE)
287 287
288 288 # The "\n" symbol
289 289 newline_re = re.compile(r'\\n')
290 290
291 291 # Now build the string for output:
292 292 #strng = cmd_name_re.sub(r'\n\\texttt{\\textsl{\\large \1}}:',strng)
293 293 strng = cmd_name_re.sub(r'\n\\bigskip\n\\texttt{\\textbf{ \1}}:',
294 294 strng)
295 295 strng = cmd_re.sub(r'\\texttt{\g<cmd>}',strng)
296 296 strng = par_re.sub(r'\\\\',strng)
297 297 strng = escape_re.sub(r'\\\1',strng)
298 298 strng = newline_re.sub(r'\\textbackslash{}n',strng)
299 299 return strng
300 300
301 301 def format_screen(self,strng):
302 302 """Format a string for screen printing.
303 303
304 304 This removes some latex-type format codes."""
305 305 # Paragraph continue
306 306 par_re = re.compile(r'\\$',re.MULTILINE)
307 307 strng = par_re.sub('',strng)
308 308 return strng
309 309
310 310 def parse_options(self,arg_str,opt_str,*long_opts,**kw):
311 311 """Parse options passed to an argument string.
312 312
313 313 The interface is similar to that of getopt(), but it returns a
314 314 Struct with the options as keys and the stripped argument string still
315 315 as a string.
316 316
317 317 arg_str is split into a true sys.argv vector by using shlex.split.
318 318 This allows us to easily expand variables, glob files, quote
319 319 arguments, etc.
320 320
321 321 Options:
322 322 -mode: default 'string'. If given as 'list', the argument string is
323 323 returned as a list (split on whitespace) instead of a string.
324 324
325 325 -list_all: put all option values in lists. Normally only options
326 326 appearing more than once are put in a list.
327 327
328 328 -posix (True): whether to split the input line in POSIX mode or not,
329 329 as per the conventions outlined in the shlex module from the
330 330 standard library."""
331 331
332 332 # inject default options at the beginning of the input line
333 333 caller = sys._getframe(1).f_code.co_name.replace('magic_','')
334 334 arg_str = '%s %s' % (self.options_table.get(caller,''),arg_str)
335 335
336 336 mode = kw.get('mode','string')
337 337 if mode not in ['string','list']:
338 338 raise ValueError,'incorrect mode given: %s' % mode
339 339 # Get options
340 340 list_all = kw.get('list_all',0)
341 341 posix = kw.get('posix',True)
342 342
343 343 # Check if we have more than one argument to warrant extra processing:
344 344 odict = {} # Dictionary with options
345 345 args = arg_str.split()
346 346 if len(args) >= 1:
347 347 # If the list of inputs only has 0 or 1 thing in it, there's no
348 348 # need to look for options
349 349 argv = arg_split(arg_str,posix)
350 350 # Do regular option processing
351 351 try:
352 352 opts,args = getopt(argv,opt_str,*long_opts)
353 353 except GetoptError,e:
354 354 raise UsageError('%s ( allowed: "%s" %s)' % (e.msg,opt_str,
355 355 " ".join(long_opts)))
356 356 for o,a in opts:
357 357 if o.startswith('--'):
358 358 o = o[2:]
359 359 else:
360 360 o = o[1:]
361 361 try:
362 362 odict[o].append(a)
363 363 except AttributeError:
364 364 odict[o] = [odict[o],a]
365 365 except KeyError:
366 366 if list_all:
367 367 odict[o] = [a]
368 368 else:
369 369 odict[o] = a
370 370
371 371 # Prepare opts,args for return
372 372 opts = Struct(odict)
373 373 if mode == 'string':
374 374 args = ' '.join(args)
375 375
376 376 return opts,args
377 377
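A hedged sketch of how a magic might consume this helper (magic_example and its option letters are invented for illustration):

def magic_example(self, parameter_s=''):
    # -q is a flag, -n takes a value; whatever is left comes back in args
    opts, args = self.parse_options(parameter_s, 'qn:')
    quiet = opts.has_key('q')
    count = int(opts.get('n', 1))
    print quiet, count, args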
378 378 #......................................................................
379 379 # And now the actual magic functions
380 380
381 381 # Functions for IPython shell work (vars,funcs, config, etc)
382 382 def magic_lsmagic(self, parameter_s = ''):
383 383 """List currently available magic functions."""
384 384 mesc = self.shell.ESC_MAGIC
385 385 print 'Available magic functions:\n'+mesc+\
386 386 (' '+mesc).join(self.lsmagic())
387 387 print '\n' + Magic.auto_status[self.shell.rc.automagic]
388 388 return None
389 389
390 390 def magic_magic(self, parameter_s = ''):
391 391 """Print information about the magic function system.
392 392
393 393 Supported formats: -latex, -brief, -rest
394 394 """
395 395
396 396 mode = ''
397 397 try:
398 398 if parameter_s.split()[0] == '-latex':
399 399 mode = 'latex'
400 400 if parameter_s.split()[0] == '-brief':
401 401 mode = 'brief'
402 402 if parameter_s.split()[0] == '-rest':
403 403 mode = 'rest'
404 404 rest_docs = []
405 405 except:
406 406 pass
407 407
408 408 magic_docs = []
409 409 for fname in self.lsmagic():
410 410 mname = 'magic_' + fname
411 411 for space in (Magic,self,self.__class__):
412 412 try:
413 413 fn = space.__dict__[mname]
414 414 except KeyError:
415 415 pass
416 416 else:
417 417 break
418 418 if mode == 'brief':
419 419 # only first line
420 420 if fn.__doc__:
421 421 fndoc = fn.__doc__.split('\n',1)[0]
422 422 else:
423 423 fndoc = 'No documentation'
424 424 else:
425 fndoc = fn.__doc__.rstrip()
425 if fn.__doc__:
426 fndoc = fn.__doc__.rstrip()
427 else:
428 fndoc = 'No documentation'
429
426 430
427 431 if mode == 'rest':
428 432 rest_docs.append('**%s%s**::\n\n\t%s\n\n' %(self.shell.ESC_MAGIC,
429 433 fname,fndoc))
430 434
431 435 else:
432 436 magic_docs.append('%s%s:\n\t%s\n' %(self.shell.ESC_MAGIC,
433 437 fname,fndoc))
434 438
435 439 magic_docs = ''.join(magic_docs)
436 440
437 441 if mode == 'rest':
438 442 return "".join(rest_docs)
439 443
440 444 if mode == 'latex':
441 445 print self.format_latex(magic_docs)
442 446 return
443 447 else:
444 448 magic_docs = self.format_screen(magic_docs)
445 449 if mode == 'brief':
446 450 return magic_docs
447 451
448 452 outmsg = """
449 453 IPython's 'magic' functions
450 454 ===========================
451 455
452 456 The magic function system provides a series of functions which allow you to
453 457 control the behavior of IPython itself, plus a lot of system-type
454 458 features. All these functions are prefixed with a % character, but parameters
455 459 are given without parentheses or quotes.
456 460
457 461 NOTE: If you have 'automagic' enabled (via the command line option or with the
458 462 %automagic function), you don't need to type in the % explicitly. By default,
459 463 IPython ships with automagic on, so you should only rarely need the % escape.
460 464
461 465 Example: typing '%cd mydir' (without the quotes) changes your working directory
462 466 to 'mydir', if it exists.
463 467
464 468 You can define your own magic functions to extend the system. See the supplied
465 469 ipythonrc and example-magic.py files for details (in your ipython
466 470 configuration directory, typically $HOME/.ipython/).
467 471
468 472 You can also define your own aliased names for magic functions. In your
469 473 ipythonrc file, placing a line like:
470 474
471 475 execute __IPYTHON__.magic_pf = __IPYTHON__.magic_profile
472 476
473 477 will define %pf as a new name for %profile.
474 478
475 479 You can also call magics in code using the ipmagic() function, which IPython
476 480 automatically adds to the builtin namespace. Type 'ipmagic?' for details.
477 481
478 482 For a list of the available magic functions, use %lsmagic. For a description
479 483 of any of them, type %magic_name?, e.g. '%cd?'.
480 484
481 485 Currently the magic system has the following functions:\n"""
482 486
483 487 mesc = self.shell.ESC_MAGIC
484 488 outmsg = ("%s\n%s\n\nSummary of magic functions (from %slsmagic):"
485 489 "\n\n%s%s\n\n%s" % (outmsg,
486 490 magic_docs,mesc,mesc,
487 491 (' '+mesc).join(self.lsmagic()),
488 492 Magic.auto_status[self.shell.rc.automagic] ) )
489 493
490 494 page(outmsg,screen_lines=self.shell.rc.screen_length)
491 495
492 496
493 497 def magic_autoindent(self, parameter_s = ''):
494 498 """Toggle autoindent on/off (if available)."""
495 499
496 500 self.shell.set_autoindent()
497 501 print "Automatic indentation is:",['OFF','ON'][self.shell.autoindent]
498 502
499 503
500 504 def magic_automagic(self, parameter_s = ''):
501 505 """Make magic functions callable without having to type the initial %.
502 506
503 507 Without arguments it toggles on/off (when off, you must call it as
504 508 %automagic, of course). With arguments it sets the value, and you can
505 509 use any of (case insensitive):
506 510
507 511 - on,1,True: to activate
508 512
509 513 - off,0,False: to deactivate.
510 514
511 515 Note that magic functions have lowest priority, so if there's a
512 516 variable whose name collides with that of a magic fn, automagic won't
513 517 work for that function (you get the variable instead). However, if you
514 518 delete the variable (del var), the previously shadowed magic function
515 519 becomes visible to automagic again."""
516 520
517 521 rc = self.shell.rc
518 522 arg = parameter_s.lower()
519 523 if parameter_s in ('on','1','true'):
520 524 rc.automagic = True
521 525 elif parameter_s in ('off','0','false'):
522 526 rc.automagic = False
523 527 else:
524 528 rc.automagic = not rc.automagic
525 529 print '\n' + Magic.auto_status[rc.automagic]
526 530
527 531 @testdec.skip_doctest
528 532 def magic_autocall(self, parameter_s = ''):
529 533 """Make functions callable without having to type parentheses.
530 534
531 535 Usage:
532 536
533 537 %autocall [mode]
534 538
535 539 The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the
536 540 value is toggled on and off (remembering the previous state).
537 541
538 542 In more detail, these values mean:
539 543
540 544 0 -> fully disabled
541 545
542 546 1 -> active, but do not apply if there are no arguments on the line.
543 547
544 548 In this mode, you get:
545 549
546 550 In [1]: callable
547 551 Out[1]: <built-in function callable>
548 552
549 553 In [2]: callable 'hello'
550 554 ------> callable('hello')
551 555 Out[2]: False
552 556
553 557 2 -> Active always. Even if no arguments are present, the callable
554 558 object is called:
555 559
556 560 In [2]: float
557 561 ------> float()
558 562 Out[2]: 0.0
559 563
560 564 Note that even with autocall off, you can still use '/' at the start of
561 565 a line to treat the first argument on the command line as a function
562 566 and add parentheses to it:
563 567
564 568 In [8]: /str 43
565 569 ------> str(43)
566 570 Out[8]: '43'
567 571
568 572 # all-random (note for auto-testing)
569 573 """
570 574
571 575 rc = self.shell.rc
572 576
573 577 if parameter_s:
574 578 arg = int(parameter_s)
575 579 else:
576 580 arg = 'toggle'
577 581
578 582 if not arg in (0,1,2,'toggle'):
579 583 error('Valid modes: (0->Off, 1->Smart, 2->Full)')
580 584 return
581 585
582 586 if arg in (0,1,2):
583 587 rc.autocall = arg
584 588 else: # toggle
585 589 if rc.autocall:
586 590 self._magic_state.autocall_save = rc.autocall
587 591 rc.autocall = 0
588 592 else:
589 593 try:
590 594 rc.autocall = self._magic_state.autocall_save
591 595 except AttributeError:
592 596 rc.autocall = self._magic_state.autocall_save = 1
593 597
594 598 print "Automatic calling is:",['OFF','Smart','Full'][rc.autocall]
595 599
596 600 def magic_system_verbose(self, parameter_s = ''):
597 601 """Set verbose printing of system calls.
598 602
599 603 If called without an argument, act as a toggle"""
600 604
601 605 if parameter_s:
602 606 val = bool(eval(parameter_s))
603 607 else:
604 608 val = None
605 609
606 610 self.shell.rc_set_toggle('system_verbose',val)
607 611 print "System verbose printing is:",\
608 612 ['OFF','ON'][self.shell.rc.system_verbose]
609 613
610 614
611 615 def magic_page(self, parameter_s=''):
612 616 """Pretty print the object and display it through a pager.
613 617
614 618 %page [options] OBJECT
615 619
616 620 If no object is given, use _ (last output).
617 621
618 622 Options:
619 623
620 624 -r: page str(object), don't pretty-print it."""
621 625
622 626 # After a function contributed by Olivier Aubert, slightly modified.
623 627
624 628 # Process options/args
625 629 opts,args = self.parse_options(parameter_s,'r')
626 630 raw = 'r' in opts
627 631
628 632 oname = args and args or '_'
629 633 info = self._ofind(oname)
630 634 if info['found']:
631 635 txt = (raw and str or pformat)( info['obj'] )
632 636 page(txt)
633 637 else:
634 638 print 'Object `%s` not found' % oname
635 639
636 640 def magic_profile(self, parameter_s=''):
637 641 """Print your currently active IPython profile."""
638 642 if self.shell.rc.profile:
639 643 printpl('Current IPython profile: $self.shell.rc.profile.')
640 644 else:
641 645 print 'No profile active.'
642 646
643 647 def magic_pinfo(self, parameter_s='', namespaces=None):
644 648 """Provide detailed information about an object.
645 649
646 650 '%pinfo object' is just a synonym for object? or ?object."""
647 651
648 652 #print 'pinfo par: <%s>' % parameter_s # dbg
649 653
650 654
651 655 # detail_level: 0 -> obj? , 1 -> obj??
652 656 detail_level = 0
653 657 # We need to detect if we got called as 'pinfo pinfo foo', which can
654 658 # happen if the user types 'pinfo foo?' at the cmd line.
655 659 pinfo,qmark1,oname,qmark2 = \
656 660 re.match('(pinfo )?(\?*)(.*?)(\??$)',parameter_s).groups()
657 661 if pinfo or qmark1 or qmark2:
658 662 detail_level = 1
659 663 if "*" in oname:
660 664 self.magic_psearch(oname)
661 665 else:
662 666 self._inspect('pinfo', oname, detail_level=detail_level,
663 667 namespaces=namespaces)
664 668
665 669 def magic_pdef(self, parameter_s='', namespaces=None):
666 670 """Print the definition header for any callable object.
667 671
668 672 If the object is a class, print the constructor information."""
669 673 self._inspect('pdef',parameter_s, namespaces)
670 674
671 675 def magic_pdoc(self, parameter_s='', namespaces=None):
672 676 """Print the docstring for an object.
673 677
674 678 If the given object is a class, it will print both the class and the
675 679 constructor docstrings."""
676 680 self._inspect('pdoc',parameter_s, namespaces)
677 681
678 682 def magic_psource(self, parameter_s='', namespaces=None):
679 683 """Print (or run through pager) the source code for an object."""
680 684 self._inspect('psource',parameter_s, namespaces)
681 685
682 686 def magic_pfile(self, parameter_s=''):
683 687 """Print (or run through pager) the file where an object is defined.
684 688
685 689 The file opens at the line where the object definition begins. IPython
686 690 will honor the environment variable PAGER if set, and otherwise will
687 691 do its best to print the file in a convenient form.
688 692
689 693 If the given argument is not an object currently defined, IPython will
690 694 try to interpret it as a filename (automatically adding a .py extension
691 695 if needed). You can thus use %pfile as a syntax highlighting code
692 696 viewer."""
693 697
694 698 # first interpret argument as an object name
695 699 out = self._inspect('pfile',parameter_s)
696 700 # if not, try the input as a filename
697 701 if out == 'not found':
698 702 try:
699 703 filename = get_py_filename(parameter_s)
700 704 except IOError,msg:
701 705 print msg
702 706 return
703 707 page(self.shell.inspector.format(file(filename).read()))
704 708
705 709 def _inspect(self,meth,oname,namespaces=None,**kw):
706 710 """Generic interface to the inspector system.
707 711
708 712 This function is meant to be called by pdef, pdoc & friends."""
709 713
710 714 #oname = oname.strip()
711 715 #print '1- oname: <%r>' % oname # dbg
712 716 try:
713 717 oname = oname.strip().encode('ascii')
714 718 #print '2- oname: <%r>' % oname # dbg
715 719 except UnicodeEncodeError:
716 720 print 'Python identifiers can only contain ascii characters.'
717 721 return 'not found'
718 722
719 723 info = Struct(self._ofind(oname, namespaces))
720 724
721 725 if info.found:
722 726 try:
723 727 IPython.generics.inspect_object(info.obj)
724 728 return
725 729 except IPython.ipapi.TryNext:
726 730 pass
727 731 # Get the docstring of the class property if it exists.
728 732 path = oname.split('.')
729 733 root = '.'.join(path[:-1])
730 734 if info.parent is not None:
731 735 try:
732 736 target = getattr(info.parent, '__class__')
733 737 # The object belongs to a class instance.
734 738 try:
735 739 target = getattr(target, path[-1])
736 740 # The class defines the object.
737 741 if isinstance(target, property):
738 742 oname = root + '.__class__.' + path[-1]
739 743 info = Struct(self._ofind(oname))
740 744 except AttributeError: pass
741 745 except AttributeError: pass
742 746
743 747 pmethod = getattr(self.shell.inspector,meth)
744 748 formatter = info.ismagic and self.format_screen or None
745 749 if meth == 'pdoc':
746 750 pmethod(info.obj,oname,formatter)
747 751 elif meth == 'pinfo':
748 752 pmethod(info.obj,oname,formatter,info,**kw)
749 753 else:
750 754 pmethod(info.obj,oname)
751 755 else:
752 756 print 'Object `%s` not found.' % oname
753 757 return 'not found' # so callers can take other action
754 758
755 759 def magic_psearch(self, parameter_s=''):
756 760 """Search for object in namespaces by wildcard.
757 761
758 762 %psearch [options] PATTERN [OBJECT TYPE]
759 763
760 764 Note: ? can be used as a synonym for %psearch, at the beginning or at
761 765 the end: both a*? and ?a* are equivalent to '%psearch a*'. Still, the
762 766 rest of the command line must be unchanged (options come first), so
763 767 for example the following forms are equivalent
764 768
765 769 %psearch -i a* function
766 770 -i a* function?
767 771 ?-i a* function
768 772
769 773 Arguments:
770 774
771 775 PATTERN
772 776
773 777 where PATTERN is a string containing * as a wildcard similar to its
774 778 use in a shell. The pattern is matched in all namespaces on the
775 779 search path. By default objects starting with a single _ are not
776 780 matched, many IPython generated objects have a single
777 781 underscore. The default is case insensitive matching. Matching is
778 782 also done on the attributes of objects and not only on the objects
779 783 in a module.
780 784
781 785 [OBJECT TYPE]
782 786
783 787 Is the name of a python type from the types module. The name is
784 788 given in lowercase without the ending type, ex. StringType is
785 789 written string. By adding a type here only objects matching the
786 790 given type are matched. Using all here makes the pattern match all
787 791 types (this is the default).
788 792
789 793 Options:
790 794
791 795 -a: makes the pattern match even objects whose names start with a
792 796 single underscore. These names are normally omitted from the
793 797 search.
794 798
795 799 -i/-c: make the pattern case insensitive/sensitive. If neither of
796 800 these options is given, the default is read from your ipythonrc
797 801 file. The option name which sets this value is
798 802 'wildcards_case_sensitive'. If this option is not specified in your
799 803 ipythonrc file, IPython's internal default is to do a case sensitive
800 804 search.
801 805
802 806 -e/-s NAMESPACE: exclude/search a given namespace. The pattern you
803 807 specify can be searched in any of the following namespaces:
804 808 'builtin', 'user', 'user_global','internal', 'alias', where
805 809 'builtin' and 'user' are the search defaults. Note that you should
806 810 not use quotes when specifying namespaces.
807 811
808 812 'Builtin' contains the python module builtin, 'user' contains all
 809 813          user data, 'alias' contains only the shell aliases and no python
810 814 objects, 'internal' contains objects used by IPython. The
811 815 'user_global' namespace is only used by embedded IPython instances,
812 816 and it contains module-level globals. You can add namespaces to the
813 817 search with -s or exclude them with -e (these options can be given
814 818 more than once).
815 819
816 820 Examples:
817 821
818 822 %psearch a* -> objects beginning with an a
819 823 %psearch -e builtin a* -> objects NOT in the builtin space starting in a
820 824 %psearch a* function -> all functions beginning with an a
821 825 %psearch re.e* -> objects beginning with an e in module re
822 826 %psearch r*.e* -> objects that start with e in modules starting in r
823 827 %psearch r*.* string -> all strings in modules beginning with r
824 828
 825 829          Case-sensitive search:
826 830
 827 831          %psearch -c a*       list all objects beginning with a lowercase a
828 832
829 833 Show objects beginning with a single _:
830 834
831 835 %psearch -a _* list objects beginning with a single underscore"""
832 836 try:
833 837 parameter_s = parameter_s.encode('ascii')
834 838 except UnicodeEncodeError:
835 839 print 'Python identifiers can only contain ascii characters.'
836 840 return
837 841
838 842 # default namespaces to be searched
839 843 def_search = ['user','builtin']
840 844
841 845 # Process options/args
842 846 opts,args = self.parse_options(parameter_s,'cias:e:',list_all=True)
843 847 opt = opts.get
844 848 shell = self.shell
845 849 psearch = shell.inspector.psearch
846 850
847 851 # select case options
848 852 if opts.has_key('i'):
849 853 ignore_case = True
850 854 elif opts.has_key('c'):
851 855 ignore_case = False
852 856 else:
853 857 ignore_case = not shell.rc.wildcards_case_sensitive
854 858
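        # Illustrative note (hypothetical call): with the defaults above,
        # '%psearch -s internal -e builtin a*' would end up searching the
        # namespaces ['user', 'internal'].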
855 859 # Build list of namespaces to search from user options
856 860 def_search.extend(opt('s',[]))
 857 861          ns_exclude = opt('e',[])
858 862 ns_search = [nm for nm in def_search if nm not in ns_exclude]
859 863
860 864 # Call the actual search
861 865 try:
862 866 psearch(args,shell.ns_table,ns_search,
863 867 show_all=opt('a'),ignore_case=ignore_case)
864 868 except:
865 869 shell.showtraceback()
866 870
867 871 def magic_who_ls(self, parameter_s=''):
868 872 """Return a sorted list of all interactive variables.
869 873
870 874 If arguments are given, only variables of types matching these
871 875 arguments are returned."""
872 876
873 877 user_ns = self.shell.user_ns
874 878 internal_ns = self.shell.internal_ns
875 879 user_config_ns = self.shell.user_config_ns
876 880 out = []
877 881 typelist = parameter_s.split()
878 882
879 883 for i in user_ns:
880 884 if not (i.startswith('_') or i.startswith('_i')) \
881 885 and not (i in internal_ns or i in user_config_ns):
882 886 if typelist:
883 887 if type(user_ns[i]).__name__ in typelist:
884 888 out.append(i)
885 889 else:
886 890 out.append(i)
887 891 out.sort()
888 892 return out
889 893
890 894 def magic_who(self, parameter_s=''):
891 895 """Print all interactive variables, with some minimal formatting.
892 896
893 897 If any arguments are given, only variables whose type matches one of
894 898 these are printed. For example:
895 899
896 900 %who function str
897 901
898 902 will only list functions and strings, excluding all other types of
899 903 variables. To find the proper type names, simply use type(var) at a
900 904 command line to see how python prints type names. For example:
901 905
902 906 In [1]: type('hello')\\
903 907 Out[1]: <type 'str'>
904 908
905 909 indicates that the type name for strings is 'str'.
906 910
 907 911          %who always excludes names loaded through your configuration
908 912 file and things which are internal to IPython.
909 913
910 914 This is deliberate, as typically you may load many modules and the
911 915 purpose of %who is to show you only what you've manually defined."""
912 916
913 917 varlist = self.magic_who_ls(parameter_s)
914 918 if not varlist:
915 919 if parameter_s:
916 920 print 'No variables match your requested type.'
917 921 else:
918 922 print 'Interactive namespace is empty.'
919 923 return
920 924
921 925 # if we have variables, move on...
922 926 count = 0
923 927 for i in varlist:
924 928 print i+'\t',
925 929 count += 1
926 930 if count > 8:
927 931 count = 0
928 932 print
929 933 print
930 934
931 935 def magic_whos(self, parameter_s=''):
932 936 """Like %who, but gives some extra information about each variable.
933 937
934 938 The same type filtering of %who can be applied here.
935 939
936 940 For all variables, the type is printed. Additionally it prints:
937 941
938 942 - For {},[],(): their length.
939 943
940 944 - For numpy and Numeric arrays, a summary with shape, number of
941 945 elements, typecode and size in memory.
942 946
 943 947          - Everything else: a string representation, snipping the middle if
944 948 too long."""
945 949
946 950 varnames = self.magic_who_ls(parameter_s)
947 951 if not varnames:
948 952 if parameter_s:
949 953 print 'No variables match your requested type.'
950 954 else:
951 955 print 'Interactive namespace is empty.'
952 956 return
953 957
954 958 # if we have variables, move on...
955 959
956 960 # for these types, show len() instead of data:
957 961 seq_types = [types.DictType,types.ListType,types.TupleType]
958 962
959 963 # for numpy/Numeric arrays, display summary info
960 964 try:
961 965 import numpy
962 966 except ImportError:
963 967 ndarray_type = None
964 968 else:
965 969 ndarray_type = numpy.ndarray.__name__
966 970 try:
967 971 import Numeric
968 972 except ImportError:
969 973 array_type = None
970 974 else:
971 975 array_type = Numeric.ArrayType.__name__
972 976
973 977 # Find all variable names and types so we can figure out column sizes
974 978 def get_vars(i):
975 979 return self.shell.user_ns[i]
976 980
977 981 # some types are well known and can be shorter
978 982 abbrevs = {'IPython.macro.Macro' : 'Macro'}
979 983 def type_name(v):
980 984 tn = type(v).__name__
981 985 return abbrevs.get(tn,tn)
982 986
983 987 varlist = map(get_vars,varnames)
984 988
985 989 typelist = []
986 990 for vv in varlist:
987 991 tt = type_name(vv)
988 992
989 993 if tt=='instance':
990 994 typelist.append( abbrevs.get(str(vv.__class__),
991 995 str(vv.__class__)))
992 996 else:
993 997 typelist.append(tt)
994 998
995 999 # column labels and # of spaces as separator
996 1000 varlabel = 'Variable'
997 1001 typelabel = 'Type'
998 1002 datalabel = 'Data/Info'
999 1003 colsep = 3
1000 1004 # variable format strings
1001 1005 vformat = "$vname.ljust(varwidth)$vtype.ljust(typewidth)"
1002 1006 vfmt_short = '$vstr[:25]<...>$vstr[-25:]'
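        # Note: vformat and vfmt_short are Itpl templates; their $-expressions
        # are interpolated later via itpl()/printpl() against the local
        # variables of the printing loop below.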
1003 1007 aformat = "%s: %s elems, type `%s`, %s bytes"
1004 1008 # find the size of the columns to format the output nicely
1005 1009 varwidth = max(max(map(len,varnames)), len(varlabel)) + colsep
1006 1010 typewidth = max(max(map(len,typelist)), len(typelabel)) + colsep
1007 1011 # table header
1008 1012 print varlabel.ljust(varwidth) + typelabel.ljust(typewidth) + \
1009 1013 ' '+datalabel+'\n' + '-'*(varwidth+typewidth+len(datalabel)+1)
1010 1014 # and the table itself
1011 1015 kb = 1024
1012 1016 Mb = 1048576 # kb**2
1013 1017 for vname,var,vtype in zip(varnames,varlist,typelist):
1014 1018 print itpl(vformat),
1015 1019 if vtype in seq_types:
1016 1020 print len(var)
1017 1021 elif vtype in [array_type,ndarray_type]:
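                # e.g. a shape of (10, 20) is rendered below as '10x20'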
1018 1022 vshape = str(var.shape).replace(',','').replace(' ','x')[1:-1]
1019 1023 if vtype==ndarray_type:
1020 1024 # numpy
1021 1025 vsize = var.size
1022 1026 vbytes = vsize*var.itemsize
1023 1027 vdtype = var.dtype
1024 1028 else:
1025 1029 # Numeric
1026 1030 vsize = Numeric.size(var)
1027 1031 vbytes = vsize*var.itemsize()
1028 1032 vdtype = var.typecode()
1029 1033
1030 1034 if vbytes < 100000:
1031 1035 print aformat % (vshape,vsize,vdtype,vbytes)
1032 1036 else:
1033 1037 print aformat % (vshape,vsize,vdtype,vbytes),
1034 1038 if vbytes < Mb:
1035 1039 print '(%s kb)' % (vbytes/kb,)
1036 1040 else:
1037 1041 print '(%s Mb)' % (vbytes/Mb,)
1038 1042 else:
1039 1043 try:
1040 1044 vstr = str(var)
1041 1045 except UnicodeEncodeError:
1042 1046 vstr = unicode(var).encode(sys.getdefaultencoding(),
1043 1047 'backslashreplace')
1044 1048 vstr = vstr.replace('\n','\\n')
1045 1049 if len(vstr) < 50:
1046 1050 print vstr
1047 1051 else:
1048 1052 printpl(vfmt_short)
1049 1053
1050 1054 def magic_reset(self, parameter_s=''):
1051 1055 """Resets the namespace by removing all names defined by the user.
1052 1056
1053 1057 Input/Output history are left around in case you need them."""
1054 1058
1055 1059 ans = self.shell.ask_yes_no(
1056 1060 "Once deleted, variables cannot be recovered. Proceed (y/[n])? ")
1057 1061 if not ans:
1058 1062 print 'Nothing done.'
1059 1063 return
1060 1064 user_ns = self.shell.user_ns
1061 1065 for i in self.magic_who_ls():
1062 1066 del(user_ns[i])
1063 1067
1064 1068 # Also flush the private list of module references kept for script
1065 1069 # execution protection
1066 1070 self.shell._user_main_modules[:] = []
1067 1071
1068 1072 def magic_logstart(self,parameter_s=''):
1069 1073 """Start logging anywhere in a session.
1070 1074
1071 1075 %logstart [-o|-r|-t] [log_name [log_mode]]
1072 1076
1073 1077 If no name is given, it defaults to a file named 'ipython_log.py' in your
1074 1078 current directory, in 'rotate' mode (see below).
1075 1079
1076 1080 '%logstart name' saves to file 'name' in 'backup' mode. It saves your
1077 1081 history up to that point and then continues logging.
1078 1082
1079 1083 %logstart takes a second optional parameter: logging mode. This can be one
1080 1084 of (note that the modes are given unquoted):\\
1081 1085 append: well, that says it.\\
1082 1086 backup: rename (if exists) to name~ and start name.\\
1083 1087 global: single logfile in your home dir, appended to.\\
1084 1088 over : overwrite existing log.\\
1085 1089 rotate: create rotating logs name.1~, name.2~, etc.
1086 1090
1087 1091 Options:
1088 1092
1089 1093 -o: log also IPython's output. In this mode, all commands which
1090 1094 generate an Out[NN] prompt are recorded to the logfile, right after
1091 1095 their corresponding input line. The output lines are always
1092 1096 prepended with a '#[Out]# ' marker, so that the log remains valid
1093 1097 Python code.
1094 1098
1095 1099 Since this marker is always the same, filtering only the output from
1096 1100 a log is very easy, using for example a simple awk call:
1097 1101
1098 1102 awk -F'#\\[Out\\]# ' '{if($2) {print $2}}' ipython_log.py
1099 1103
1100 1104 -r: log 'raw' input. Normally, IPython's logs contain the processed
1101 1105 input, so that user lines are logged in their final form, converted
1102 1106 into valid Python. For example, %Exit is logged as
1103 1107 '_ip.magic("Exit"). If the -r flag is given, all input is logged
 1104 1108          '_ip.magic("Exit")'. If the -r flag is given, all input is logged
1105 1109
1106 1110 -t: put timestamps before each input line logged (these are put in
1107 1111 comments)."""
1108 1112
1109 1113 opts,par = self.parse_options(parameter_s,'ort')
1110 1114 log_output = 'o' in opts
1111 1115 log_raw_input = 'r' in opts
1112 1116 timestamp = 't' in opts
1113 1117
1114 1118 rc = self.shell.rc
1115 1119 logger = self.shell.logger
1116 1120
1117 1121 # if no args are given, the defaults set in the logger constructor by
 1118 1122          # IPython remain valid
1119 1123 if par:
1120 1124 try:
1121 1125 logfname,logmode = par.split()
1122 1126 except:
1123 1127 logfname = par
1124 1128 logmode = 'backup'
1125 1129 else:
1126 1130 logfname = logger.logfname
1127 1131 logmode = logger.logmode
1128 1132 # put logfname into rc struct as if it had been called on the command
 1129 1133          # line, so it ends up saved in the log header. Save it in case we need
1130 1134 # to restore it...
1131 1135 old_logfile = rc.opts.get('logfile','')
1132 1136 if logfname:
1133 1137 logfname = os.path.expanduser(logfname)
1134 1138 rc.opts.logfile = logfname
1135 1139 loghead = self.shell.loghead_tpl % (rc.opts,rc.args)
1136 1140 try:
1137 1141 started = logger.logstart(logfname,loghead,logmode,
1138 1142 log_output,timestamp,log_raw_input)
1139 1143 except:
1140 1144 rc.opts.logfile = old_logfile
1141 1145 warn("Couldn't start log: %s" % sys.exc_info()[1])
1142 1146 else:
1143 1147 # log input history up to this point, optionally interleaving
1144 1148 # output if requested
1145 1149
1146 1150 if timestamp:
1147 1151 # disable timestamping for the previous history, since we've
1148 1152 # lost those already (no time machine here).
1149 1153 logger.timestamp = False
1150 1154
1151 1155 if log_raw_input:
1152 1156 input_hist = self.shell.input_hist_raw
1153 1157 else:
1154 1158 input_hist = self.shell.input_hist
1155 1159
1156 1160 if log_output:
1157 1161 log_write = logger.log_write
1158 1162 output_hist = self.shell.output_hist
1159 1163 for n in range(1,len(input_hist)-1):
1160 1164 log_write(input_hist[n].rstrip())
1161 1165 if n in output_hist:
1162 1166 log_write(repr(output_hist[n]),'output')
1163 1167 else:
1164 1168 logger.log_write(input_hist[1:])
1165 1169 if timestamp:
1166 1170 # re-enable timestamping
1167 1171 logger.timestamp = True
1168 1172
1169 1173 print ('Activating auto-logging. '
1170 1174 'Current session state plus future input saved.')
1171 1175 logger.logstate()
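        # Sketch (comments only, not executed): a pure-Python equivalent of the
        # awk filter shown in the docstring above, assuming a log written with
        # '%logstart -o' to the default file name:
        #
        #   for line in open('ipython_log.py'):
        #       if line.startswith('#[Out]# '):
        #           print line[len('#[Out]# '):],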
1172 1176
1173 1177 def magic_logstop(self,parameter_s=''):
1174 1178 """Fully stop logging and close log file.
1175 1179
1176 1180 In order to start logging again, a new %logstart call needs to be made,
1177 1181 possibly (though not necessarily) with a new filename, mode and other
1178 1182 options."""
1179 1183 self.logger.logstop()
1180 1184
1181 1185 def magic_logoff(self,parameter_s=''):
1182 1186 """Temporarily stop logging.
1183 1187
1184 1188 You must have previously started logging."""
1185 1189 self.shell.logger.switch_log(0)
1186 1190
1187 1191 def magic_logon(self,parameter_s=''):
1188 1192 """Restart logging.
1189 1193
1190 1194 This function is for restarting logging which you've temporarily
1191 1195 stopped with %logoff. For starting logging for the first time, you
1192 1196 must use the %logstart function, which allows you to specify an
1193 1197 optional log filename."""
1194 1198
1195 1199 self.shell.logger.switch_log(1)
1196 1200
1197 1201 def magic_logstate(self,parameter_s=''):
1198 1202 """Print the status of the logging system."""
1199 1203
1200 1204 self.shell.logger.logstate()
1201 1205
1202 1206 def magic_pdb(self, parameter_s=''):
1203 1207 """Control the automatic calling of the pdb interactive debugger.
1204 1208
1205 1209 Call as '%pdb on', '%pdb 1', '%pdb off' or '%pdb 0'. If called without
1206 1210 argument it works as a toggle.
1207 1211
1208 1212 When an exception is triggered, IPython can optionally call the
1209 1213 interactive pdb debugger after the traceback printout. %pdb toggles
1210 1214 this feature on and off.
1211 1215
1212 1216 The initial state of this feature is set in your ipythonrc
1213 1217 configuration file (the variable is called 'pdb').
1214 1218
1215 1219 If you want to just activate the debugger AFTER an exception has fired,
1216 1220 without having to type '%pdb on' and rerunning your code, you can use
1217 1221 the %debug magic."""
1218 1222
1219 1223 par = parameter_s.strip().lower()
1220 1224
1221 1225 if par:
1222 1226 try:
1223 1227 new_pdb = {'off':0,'0':0,'on':1,'1':1}[par]
1224 1228 except KeyError:
1225 1229 print ('Incorrect argument. Use on/1, off/0, '
1226 1230 'or nothing for a toggle.')
1227 1231 return
1228 1232 else:
1229 1233 # toggle
1230 1234 new_pdb = not self.shell.call_pdb
1231 1235
1232 1236 # set on the shell
1233 1237 self.shell.call_pdb = new_pdb
1234 1238 print 'Automatic pdb calling has been turned',on_off(new_pdb)
1235 1239
1236 1240 def magic_debug(self, parameter_s=''):
1237 1241 """Activate the interactive debugger in post-mortem mode.
1238 1242
1239 1243 If an exception has just occurred, this lets you inspect its stack
1240 1244 frames interactively. Note that this will always work only on the last
1241 1245 traceback that occurred, so you must call this quickly after an
1242 1246 exception that you wish to inspect has fired, because if another one
1243 1247 occurs, it clobbers the previous one.
1244 1248
1245 1249 If you want IPython to automatically do this on every exception, see
1246 1250 the %pdb magic for more details.
1247 1251 """
1248 1252
1249 1253 self.shell.debugger(force=True)
1250 1254
1251 1255 @testdec.skip_doctest
1252 1256 def magic_prun(self, parameter_s ='',user_mode=1,
1253 1257 opts=None,arg_lst=None,prog_ns=None):
1254 1258
1255 1259 """Run a statement through the python code profiler.
1256 1260
1257 1261 Usage:
1258 1262 %prun [options] statement
1259 1263
1260 1264 The given statement (which doesn't require quote marks) is run via the
1261 1265 python profiler in a manner similar to the profile.run() function.
1262 1266 Namespaces are internally managed to work correctly; profile.run
1263 1267 cannot be used in IPython because it makes certain assumptions about
1264 1268 namespaces which do not hold under IPython.
1265 1269
1266 1270 Options:
1267 1271
1268 1272 -l <limit>: you can place restrictions on what or how much of the
1269 1273 profile gets printed. The limit value can be:
1270 1274
1271 1275 * A string: only information for function names containing this string
1272 1276 is printed.
1273 1277
 1274 1278          * An integer: only this many lines are printed.
1275 1279
1276 1280 * A float (between 0 and 1): this fraction of the report is printed
1277 1281 (for example, use a limit of 0.4 to see the topmost 40% only).
1278 1282
1279 1283 You can combine several limits with repeated use of the option. For
1280 1284 example, '-l __init__ -l 5' will print only the topmost 5 lines of
1281 1285 information about class constructors.
1282 1286
1283 1287 -r: return the pstats.Stats object generated by the profiling. This
1284 1288 object has all the information about the profile in it, and you can
1285 1289 later use it for further analysis or in other functions.
1286 1290
1287 1291 -s <key>: sort profile by given key. You can provide more than one key
1288 1292 by using the option several times: '-s key1 -s key2 -s key3...'. The
1289 1293 default sorting key is 'time'.
1290 1294
1291 1295 The following is copied verbatim from the profile documentation
1292 1296 referenced below:
1293 1297
1294 1298 When more than one key is provided, additional keys are used as
 1295 1299          secondary criteria when there is equality in all keys selected
1296 1300 before them.
1297 1301
1298 1302 Abbreviations can be used for any key names, as long as the
1299 1303 abbreviation is unambiguous. The following are the keys currently
1300 1304 defined:
1301 1305
1302 1306 Valid Arg Meaning
1303 1307 "calls" call count
1304 1308 "cumulative" cumulative time
1305 1309 "file" file name
1306 1310 "module" file name
1307 1311 "pcalls" primitive call count
1308 1312 "line" line number
1309 1313 "name" function name
1310 1314 "nfl" name/file/line
1311 1315 "stdname" standard name
1312 1316 "time" internal time
1313 1317
1314 1318 Note that all sorts on statistics are in descending order (placing
 1315 1319          most time-consuming items first), whereas name, file, and line number
1316 1320 searches are in ascending order (i.e., alphabetical). The subtle
1317 1321 distinction between "nfl" and "stdname" is that the standard name is a
1318 1322 sort of the name as printed, which means that the embedded line
1319 1323 numbers get compared in an odd way. For example, lines 3, 20, and 40
1320 1324 would (if the file names were the same) appear in the string order
1321 1325 "20" "3" and "40". In contrast, "nfl" does a numeric compare of the
1322 1326 line numbers. In fact, sort_stats("nfl") is the same as
1323 1327 sort_stats("name", "file", "line").
1324 1328
1325 1329 -T <filename>: save profile results as shown on screen to a text
1326 1330 file. The profile is still shown on screen.
1327 1331
1328 1332 -D <filename>: save (via dump_stats) profile statistics to given
 1329 1333          filename. This data is in a format understood by the pstats module, and
1330 1334 is generated by a call to the dump_stats() method of profile
1331 1335 objects. The profile is still shown on screen.
1332 1336
1333 1337 If you want to run complete programs under the profiler's control, use
1334 1338 '%run -p [prof_opts] filename.py [args to program]' where prof_opts
1335 1339 contains profiler specific options as described here.
1336 1340
1337 1341 You can read the complete documentation for the profile module with::
1338 1342
1339 1343 In [1]: import profile; profile.help()
1340 1344 """
1341 1345
1342 1346 opts_def = Struct(D=[''],l=[],s=['time'],T=[''])
1343 1347 # protect user quote marks
1344 1348 parameter_s = parameter_s.replace('"',r'\"').replace("'",r"\'")
1345 1349
1346 1350 if user_mode: # regular user call
1347 1351 opts,arg_str = self.parse_options(parameter_s,'D:l:rs:T:',
1348 1352 list_all=1)
1349 1353 namespace = self.shell.user_ns
1350 1354 else: # called to run a program by %run -p
1351 1355 try:
1352 1356 filename = get_py_filename(arg_lst[0])
1353 1357 except IOError,msg:
1354 1358 error(msg)
1355 1359 return
1356 1360
1357 1361 arg_str = 'execfile(filename,prog_ns)'
1358 1362 namespace = locals()
1359 1363
1360 1364 opts.merge(opts_def)
1361 1365
1362 1366 prof = profile.Profile()
1363 1367 try:
1364 1368 prof = prof.runctx(arg_str,namespace,namespace)
1365 1369 sys_exit = ''
1366 1370 except SystemExit:
1367 1371 sys_exit = """*** SystemExit exception caught in code being profiled."""
1368 1372
1369 1373 stats = pstats.Stats(prof).strip_dirs().sort_stats(*opts.s)
1370 1374
1371 1375 lims = opts.l
1372 1376 if lims:
1373 1377 lims = [] # rebuild lims with ints/floats/strings
1374 1378 for lim in opts.l:
1375 1379 try:
1376 1380 lims.append(int(lim))
1377 1381 except ValueError:
1378 1382 try:
1379 1383 lims.append(float(lim))
1380 1384 except ValueError:
1381 1385 lims.append(lim)
1382 1386
1383 1387 # Trap output.
1384 1388 stdout_trap = StringIO()
1385 1389
1386 1390 if hasattr(stats,'stream'):
1387 1391 # In newer versions of python, the stats object has a 'stream'
1388 1392 # attribute to write into.
1389 1393 stats.stream = stdout_trap
1390 1394 stats.print_stats(*lims)
1391 1395 else:
1392 1396 # For older versions, we manually redirect stdout during printing
1393 1397 sys_stdout = sys.stdout
1394 1398 try:
1395 1399 sys.stdout = stdout_trap
1396 1400 stats.print_stats(*lims)
1397 1401 finally:
1398 1402 sys.stdout = sys_stdout
1399 1403
1400 1404 output = stdout_trap.getvalue()
1401 1405 output = output.rstrip()
1402 1406
1403 1407 page(output,screen_lines=self.shell.rc.screen_length)
1404 1408 print sys_exit,
1405 1409
1406 1410 dump_file = opts.D[0]
1407 1411 text_file = opts.T[0]
1408 1412 if dump_file:
1409 1413 prof.dump_stats(dump_file)
1410 1414 print '\n*** Profile stats marshalled to file',\
1411 1415 `dump_file`+'.',sys_exit
1412 1416 if text_file:
1413 1417 pfile = file(text_file,'w')
1414 1418 pfile.write(output)
1415 1419 pfile.close()
1416 1420 print '\n*** Profile printout saved to text file',\
1417 1421 `text_file`+'.',sys_exit
1418 1422
1419 1423 if opts.has_key('r'):
1420 1424 return stats
1421 1425 else:
1422 1426 return None
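        # Sketch (hypothetical session) of the -r option: the returned
        # pstats.Stats object can be re-sorted and re-printed later, e.g.
        #
        #   In [1]: %prun -r -s cumulative myfunc()
        #   In [2]: stats = _
        #   In [3]: stats.sort_stats('calls').print_stats(10)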
1423 1427
1424 1428 @testdec.skip_doctest
1425 1429 def magic_run(self, parameter_s ='',runner=None):
1426 1430 """Run the named file inside IPython as a program.
1427 1431
1428 1432 Usage:\\
1429 1433 %run [-n -i -t [-N<N>] -d [-b<N>] -p [profile options]] file [args]
1430 1434
1431 1435 Parameters after the filename are passed as command-line arguments to
1432 1436 the program (put in sys.argv). Then, control returns to IPython's
1433 1437 prompt.
1434 1438
1435 1439 This is similar to running at a system prompt:\\
1436 1440 $ python file args\\
1437 1441 but with the advantage of giving you IPython's tracebacks, and of
1438 1442 loading all variables into your interactive namespace for further use
1439 1443 (unless -p is used, see below).
1440 1444
1441 1445 The file is executed in a namespace initially consisting only of
1442 1446 __name__=='__main__' and sys.argv constructed as indicated. It thus
1443 1447 sees its environment as if it were being run as a stand-alone program
1444 1448 (except for sharing global objects such as previously imported
1445 1449 modules). But after execution, the IPython interactive namespace gets
1446 1450 updated with all variables defined in the program (except for __name__
1447 1451 and sys.argv). This allows for very convenient loading of code for
1448 1452 interactive work, while giving each program a 'clean sheet' to run in.
1449 1453
1450 1454 Options:
1451 1455
1452 1456 -n: __name__ is NOT set to '__main__', but to the running file's name
1453 1457 without extension (as python does under import). This allows running
1454 1458 scripts and reloading the definitions in them without calling code
1455 1459 protected by an ' if __name__ == "__main__" ' clause.
1456 1460
1457 1461 -i: run the file in IPython's namespace instead of an empty one. This
1458 1462 is useful if you are experimenting with code written in a text editor
1459 1463 which depends on variables defined interactively.
1460 1464
1461 1465 -e: ignore sys.exit() calls or SystemExit exceptions in the script
1462 1466 being run. This is particularly useful if IPython is being used to
1463 1467 run unittests, which always exit with a sys.exit() call. In such
1464 1468 cases you are interested in the output of the test results, not in
1465 1469 seeing a traceback of the unittest module.
1466 1470
1467 1471 -t: print timing information at the end of the run. IPython will give
1468 1472 you an estimated CPU time consumption for your script, which under
1469 1473 Unix uses the resource module to avoid the wraparound problems of
1470 1474 time.clock(). Under Unix, an estimate of time spent on system tasks
1471 1475 is also given (for Windows platforms this is reported as 0.0).
1472 1476
1473 1477 If -t is given, an additional -N<N> option can be given, where <N>
1474 1478 must be an integer indicating how many times you want the script to
1475 1479 run. The final timing report will include total and per run results.
1476 1480
1477 1481 For example (testing the script uniq_stable.py):
1478 1482
1479 1483 In [1]: run -t uniq_stable
1480 1484
1481 1485 IPython CPU timings (estimated):\\
1482 1486 User : 0.19597 s.\\
1483 1487 System: 0.0 s.\\
1484 1488
1485 1489 In [2]: run -t -N5 uniq_stable
1486 1490
1487 1491 IPython CPU timings (estimated):\\
1488 1492 Total runs performed: 5\\
1489 1493 Times : Total Per run\\
1490 1494 User : 0.910862 s, 0.1821724 s.\\
1491 1495 System: 0.0 s, 0.0 s.
1492 1496
1493 1497 -d: run your program under the control of pdb, the Python debugger.
1494 1498 This allows you to execute your program step by step, watch variables,
1495 1499 etc. Internally, what IPython does is similar to calling:
1496 1500
1497 1501 pdb.run('execfile("YOURFILENAME")')
1498 1502
1499 1503 with a breakpoint set on line 1 of your file. You can change the line
1500 1504 number for this automatic breakpoint to be <N> by using the -bN option
1501 1505 (where N must be an integer). For example:
1502 1506
1503 1507 %run -d -b40 myscript
1504 1508
1505 1509 will set the first breakpoint at line 40 in myscript.py. Note that
1506 1510 the first breakpoint must be set on a line which actually does
1507 1511 something (not a comment or docstring) for it to stop execution.
1508 1512
1509 1513 When the pdb debugger starts, you will see a (Pdb) prompt. You must
 1510 1514          first enter 'c' (without quotes) to start execution up to the first
1511 1515 breakpoint.
1512 1516
1513 1517 Entering 'help' gives information about the use of the debugger. You
1514 1518 can easily see pdb's full documentation with "import pdb;pdb.help()"
1515 1519 at a prompt.
1516 1520
1517 1521 -p: run program under the control of the Python profiler module (which
1518 1522 prints a detailed report of execution times, function calls, etc).
1519 1523
1520 1524 You can pass other options after -p which affect the behavior of the
1521 1525 profiler itself. See the docs for %prun for details.
1522 1526
1523 1527 In this mode, the program's variables do NOT propagate back to the
1524 1528 IPython interactive namespace (because they remain in the namespace
1525 1529 where the profiler executes them).
1526 1530
1527 1531 Internally this triggers a call to %prun, see its documentation for
1528 1532 details on the options available specifically for profiling.
1529 1533
1530 1534 There is one special usage for which the text above doesn't apply:
 1531 1535          if the filename ends with .ipy, the file is run as an IPython script,
 1532 1536          just as if the commands had been typed at the IPython prompt.
1533 1537 """
1534 1538
1535 1539 # get arguments and set sys.argv for program to be run.
1536 1540 opts,arg_lst = self.parse_options(parameter_s,'nidtN:b:pD:l:rs:T:e',
1537 1541 mode='list',list_all=1)
1538 1542
1539 1543 try:
1540 1544 filename = get_py_filename(arg_lst[0])
1541 1545 except IndexError:
1542 1546 warn('you must provide at least a filename.')
1543 1547 print '\n%run:\n',OInspect.getdoc(self.magic_run)
1544 1548 return
1545 1549 except IOError,msg:
1546 1550 error(msg)
1547 1551 return
1548 1552
1549 1553 if filename.lower().endswith('.ipy'):
1550 1554 self.api.runlines(open(filename).read())
1551 1555 return
1552 1556
1553 1557 # Control the response to exit() calls made by the script being run
1554 1558 exit_ignore = opts.has_key('e')
1555 1559
1556 1560 # Make sure that the running script gets a proper sys.argv as if it
1557 1561 # were run from a system shell.
1558 1562 save_argv = sys.argv # save it for later restoring
1559 1563 sys.argv = [filename]+ arg_lst[1:] # put in the proper filename
1560 1564
1561 1565 if opts.has_key('i'):
1562 1566 # Run in user's interactive namespace
1563 1567 prog_ns = self.shell.user_ns
1564 1568 __name__save = self.shell.user_ns['__name__']
1565 1569 prog_ns['__name__'] = '__main__'
1566 1570 main_mod = FakeModule(prog_ns)
1567 1571 else:
1568 1572 # Run in a fresh, empty namespace
1569 1573 if opts.has_key('n'):
1570 1574 name = os.path.splitext(os.path.basename(filename))[0]
1571 1575 else:
1572 1576 name = '__main__'
1573 1577 main_mod = FakeModule()
1574 1578 prog_ns = main_mod.__dict__
1575 1579 prog_ns['__name__'] = name
1576 1580 # The shell MUST hold a reference to main_mod so after %run exits,
1577 1581 # the python deletion mechanism doesn't zero it out (leaving
1578 1582 # dangling references)
1579 1583 self.shell._user_main_modules.append(main_mod)
1580 1584
1581 1585 # Since '%run foo' emulates 'python foo.py' at the cmd line, we must
1582 1586 # set the __file__ global in the script's namespace
1583 1587 prog_ns['__file__'] = filename
1584 1588
1585 1589 # pickle fix. See iplib for an explanation. But we need to make sure
1586 1590 # that, if we overwrite __main__, we replace it at the end
1587 1591 main_mod_name = prog_ns['__name__']
1588 1592
1589 1593 if main_mod_name == '__main__':
1590 1594 restore_main = sys.modules['__main__']
1591 1595 else:
1592 1596 restore_main = False
1593 1597
1594 1598 # This needs to be undone at the end to prevent holding references to
1595 1599 # every single object ever created.
1596 1600 sys.modules[main_mod_name] = main_mod
1597 1601
1598 1602 stats = None
1599 1603 try:
1600 1604 self.shell.savehist()
1601 1605
1602 1606 if opts.has_key('p'):
1603 1607 stats = self.magic_prun('',0,opts,arg_lst,prog_ns)
1604 1608 else:
1605 1609 if opts.has_key('d'):
1606 1610 deb = Debugger.Pdb(self.shell.rc.colors)
1607 1611 # reset Breakpoint state, which is moronically kept
1608 1612 # in a class
1609 1613 bdb.Breakpoint.next = 1
1610 1614 bdb.Breakpoint.bplist = {}
1611 1615 bdb.Breakpoint.bpbynumber = [None]
1612 1616 # Set an initial breakpoint to stop execution
1613 1617 maxtries = 10
1614 1618 bp = int(opts.get('b',[1])[0])
1615 1619 checkline = deb.checkline(filename,bp)
1616 1620 if not checkline:
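                        # scan forward (up to maxtries lines) for a line where
                        # a breakpoint can actually be set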
1617 1621 for bp in range(bp+1,bp+maxtries+1):
1618 1622 if deb.checkline(filename,bp):
1619 1623 break
1620 1624 else:
1621 1625 msg = ("\nI failed to find a valid line to set "
1622 1626 "a breakpoint\n"
1623 1627 "after trying up to line: %s.\n"
1624 1628 "Please set a valid breakpoint manually "
1625 1629 "with the -b option." % bp)
1626 1630 error(msg)
1627 1631 return
 1628 1632                      # if we find a good line number, set the breakpoint
1629 1633 deb.do_break('%s:%s' % (filename,bp))
1630 1634 # Start file run
1631 1635 print "NOTE: Enter 'c' at the",
1632 1636 print "%s prompt to start your script." % deb.prompt
1633 1637 try:
1634 1638 deb.run('execfile("%s")' % filename,prog_ns)
1635 1639
1636 1640 except:
1637 1641 etype, value, tb = sys.exc_info()
1638 1642 # Skip three frames in the traceback: the %run one,
1639 1643 # one inside bdb.py, and the command-line typed by the
1640 1644 # user (run by exec in pdb itself).
1641 1645 self.shell.InteractiveTB(etype,value,tb,tb_offset=3)
1642 1646 else:
1643 1647 if runner is None:
1644 1648 runner = self.shell.safe_execfile
1645 1649 if opts.has_key('t'):
1646 1650 # timed execution
1647 1651 try:
1648 1652 nruns = int(opts['N'][0])
1649 1653 if nruns < 1:
1650 1654 error('Number of runs must be >=1')
1651 1655 return
1652 1656 except (KeyError):
1653 1657 nruns = 1
1654 1658 if nruns == 1:
1655 1659 t0 = clock2()
1656 1660 runner(filename,prog_ns,prog_ns,
1657 1661 exit_ignore=exit_ignore)
1658 1662 t1 = clock2()
1659 1663 t_usr = t1[0]-t0[0]
 1660 1664                          t_sys = t1[1]-t0[1]
1661 1665 print "\nIPython CPU timings (estimated):"
1662 1666 print " User : %10s s." % t_usr
1663 1667 print " System: %10s s." % t_sys
1664 1668 else:
1665 1669 runs = range(nruns)
1666 1670 t0 = clock2()
1667 1671 for nr in runs:
1668 1672 runner(filename,prog_ns,prog_ns,
1669 1673 exit_ignore=exit_ignore)
1670 1674 t1 = clock2()
1671 1675 t_usr = t1[0]-t0[0]
 1672 1676                          t_sys = t1[1]-t0[1]
1673 1677 print "\nIPython CPU timings (estimated):"
1674 1678 print "Total runs performed:",nruns
1675 1679 print " Times : %10s %10s" % ('Total','Per run')
1676 1680 print " User : %10s s, %10s s." % (t_usr,t_usr/nruns)
1677 1681 print " System: %10s s, %10s s." % (t_sys,t_sys/nruns)
1678 1682
1679 1683 else:
1680 1684 # regular execution
1681 1685 runner(filename,prog_ns,prog_ns,exit_ignore=exit_ignore)
1682 1686 if opts.has_key('i'):
1683 1687 self.shell.user_ns['__name__'] = __name__save
1684 1688 else:
1685 1689 # update IPython interactive namespace
1686 1690 del prog_ns['__name__']
1687 1691 self.shell.user_ns.update(prog_ns)
1688 1692 finally:
1689 1693 # Ensure key global structures are restored
1690 1694 sys.argv = save_argv
1691 1695 if restore_main:
1692 1696 sys.modules['__main__'] = restore_main
1693 1697 else:
1694 1698 # Remove from sys.modules the reference to main_mod we'd
1695 1699 # added. Otherwise it will trap references to objects
1696 1700 # contained therein.
1697 1701 del sys.modules[main_mod_name]
1698 1702 self.shell.reloadhist()
1699 1703
1700 1704 return stats
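        # Illustrative combinations (hypothetical session; myscript.py and its
        # arguments are assumptions):
        #
        #   In [1]: %run -t -N3 myscript.py arg1              # time 3 runs, total and per-run
        #   In [2]: %run -p -s cumulative -l 20 myscript.py   # profile, top 20 lines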
1701 1705
1702 1706 def magic_runlog(self, parameter_s =''):
1703 1707 """Run files as logs.
1704 1708
1705 1709 Usage:\\
1706 1710 %runlog file1 file2 ...
1707 1711
1708 1712 Run the named files (treating them as log files) in sequence inside
1709 1713 the interpreter, and return to the prompt. This is much slower than
1710 1714 %run because each line is executed in a try/except block, but it
1711 1715 allows running files with syntax errors in them.
1712 1716
1713 1717 Normally IPython will guess when a file is one of its own logfiles, so
1714 1718 you can typically use %run even for logs. This shorthand allows you to
1715 1719 force any file to be treated as a log file."""
1716 1720
1717 1721 for f in parameter_s.split():
1718 1722 self.shell.safe_execfile(f,self.shell.user_ns,
1719 1723 self.shell.user_ns,islog=1)
1720 1724
1721 1725 @testdec.skip_doctest
1722 1726 def magic_timeit(self, parameter_s =''):
1723 1727 """Time execution of a Python statement or expression
1724 1728
1725 1729 Usage:\\
1726 1730 %timeit [-n<N> -r<R> [-t|-c]] statement
1727 1731
1728 1732 Time execution of a Python statement or expression using the timeit
1729 1733 module.
1730 1734
1731 1735 Options:
1732 1736 -n<N>: execute the given statement <N> times in a loop. If this value
1733 1737 is not given, a fitting value is chosen.
1734 1738
1735 1739 -r<R>: repeat the loop iteration <R> times and take the best result.
1736 1740 Default: 3
1737 1741
1738 1742 -t: use time.time to measure the time, which is the default on Unix.
1739 1743 This function measures wall time.
1740 1744
1741 1745 -c: use time.clock to measure the time, which is the default on
1742 1746 Windows and measures wall time. On Unix, resource.getrusage is used
1743 1747 instead and returns the CPU user time.
1744 1748
1745 1749 -p<P>: use a precision of <P> digits to display the timing result.
1746 1750 Default: 3
1747 1751
1748 1752
1749 1753 Examples:
1750 1754
1751 1755 In [1]: %timeit pass
1752 1756 10000000 loops, best of 3: 53.3 ns per loop
1753 1757
1754 1758 In [2]: u = None
1755 1759
1756 1760 In [3]: %timeit u is None
1757 1761 10000000 loops, best of 3: 184 ns per loop
1758 1762
1759 1763 In [4]: %timeit -r 4 u == None
1760 1764 1000000 loops, best of 4: 242 ns per loop
1761 1765
1762 1766 In [5]: import time
1763 1767
1764 1768 In [6]: %timeit -n1 time.sleep(2)
1765 1769 1 loops, best of 3: 2 s per loop
1766 1770
1767 1771
1768 1772 The times reported by %timeit will be slightly higher than those
1769 1773 reported by the timeit.py script when variables are accessed. This is
1770 1774 due to the fact that %timeit executes the statement in the namespace
1771 1775 of the shell, compared with timeit.py, which uses a single setup
 1772 1776          statement to import functions or create variables. Generally, the bias
1773 1777 does not matter as long as results from timeit.py are not mixed with
1774 1778 those from %timeit."""
1775 1779
1776 1780 import timeit
1777 1781 import math
1778 1782
1779 1783 units = [u"s", u"ms", u"\xb5s", u"ns"]
1780 1784 scaling = [1, 1e3, 1e6, 1e9]
1781 1785
1782 1786 opts, stmt = self.parse_options(parameter_s,'n:r:tcp:',
1783 1787 posix=False)
1784 1788 if stmt == "":
1785 1789 return
1786 1790 timefunc = timeit.default_timer
1787 1791 number = int(getattr(opts, "n", 0))
1788 1792 repeat = int(getattr(opts, "r", timeit.default_repeat))
1789 1793 precision = int(getattr(opts, "p", 3))
1790 1794 if hasattr(opts, "t"):
1791 1795 timefunc = time.time
1792 1796 if hasattr(opts, "c"):
1793 1797 timefunc = clock
1794 1798
1795 1799 timer = timeit.Timer(timer=timefunc)
1796 1800 # this code has tight coupling to the inner workings of timeit.Timer,
 1797 1801          # but is there a better way to give the code stmt access
1798 1802 # to the shell namespace?
1799 1803
1800 1804 src = timeit.template % {'stmt': timeit.reindent(stmt, 8),
1801 1805 'setup': "pass"}
1802 1806 # Track compilation time so it can be reported if too long
1803 1807 # Minimum time above which compilation time will be reported
1804 1808 tc_min = 0.1
1805 1809
1806 1810 t0 = clock()
1807 1811 code = compile(src, "<magic-timeit>", "exec")
1808 1812 tc = clock()-t0
1809 1813
1810 1814 ns = {}
1811 1815 exec code in self.shell.user_ns, ns
1812 1816 timer.inner = ns["inner"]
1813 1817
1814 1818 if number == 0:
1815 1819 # determine number so that 0.2 <= total time < 2.0
1816 1820 number = 1
1817 1821 for i in range(1, 10):
1818 1822 number *= 10
1819 1823 if timer.timeit(number) >= 0.2:
1820 1824 break
1821 1825
1822 1826 best = min(timer.repeat(repeat, number)) / number
1823 1827
1824 1828 if best > 0.0:
1825 1829 order = min(-int(math.floor(math.log10(best)) // 3), 3)
1826 1830 else:
1827 1831 order = 3
1828 1832 print u"%d loops, best of %d: %.*g %s per loop" % (number, repeat,
1829 1833 precision,
1830 1834 best * scaling[order],
1831 1835 units[order])
1832 1836 if tc > tc_min:
1833 1837 print "Compiler time: %.2f s" % tc
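        # Roughly equivalent use of the stdlib timeit module (sketch only; the
        # magic additionally gives the statement access to the shell namespace):
        #
        #   import timeit
        #   t = timeit.Timer('u is None', setup='u = None')
        #   best = min(t.repeat(3, 1000000)) / 1000000.  # best per-loop time, in s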
1834 1838
1835 1839 @testdec.skip_doctest
1836 1840 def magic_time(self,parameter_s = ''):
1837 1841 """Time execution of a Python statement or expression.
1838 1842
1839 1843 The CPU and wall clock times are printed, and the value of the
1840 1844 expression (if any) is returned. Note that under Win32, system time
1841 1845 is always reported as 0, since it can not be measured.
1842 1846
1843 1847 This function provides very basic timing functionality. In Python
1844 1848 2.3, the timeit module offers more control and sophistication, so this
1845 1849 could be rewritten to use it (patches welcome).
1846 1850
1847 1851 Some examples:
1848 1852
1849 1853 In [1]: time 2**128
1850 1854 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1851 1855 Wall time: 0.00
1852 1856 Out[1]: 340282366920938463463374607431768211456L
1853 1857
1854 1858 In [2]: n = 1000000
1855 1859
1856 1860 In [3]: time sum(range(n))
1857 1861 CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s
1858 1862 Wall time: 1.37
1859 1863 Out[3]: 499999500000L
1860 1864
1861 1865 In [4]: time print 'hello world'
1862 1866 hello world
1863 1867 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1864 1868 Wall time: 0.00
1865 1869
1866 1870 Note that the time needed by Python to compile the given expression
1867 1871 will be reported if it is more than 0.1s. In this example, the
1868 1872 actual exponentiation is done by Python at compilation time, so while
1869 1873 the expression can take a noticeable amount of time to compute, that
1870 1874 time is purely due to the compilation:
1871 1875
1872 1876 In [5]: time 3**9999;
1873 1877 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1874 1878 Wall time: 0.00 s
1875 1879
1876 1880 In [6]: time 3**999999;
1877 1881 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1878 1882 Wall time: 0.00 s
1879 1883 Compiler : 0.78 s
1880 1884 """
1881 1885
1882 1886 # fail immediately if the given expression can't be compiled
1883 1887
1884 1888 expr = self.shell.prefilter(parameter_s,False)
1885 1889
1886 1890 # Minimum time above which compilation time will be reported
1887 1891 tc_min = 0.1
1888 1892
1889 1893 try:
1890 1894 mode = 'eval'
1891 1895 t0 = clock()
1892 1896 code = compile(expr,'<timed eval>',mode)
1893 1897 tc = clock()-t0
1894 1898 except SyntaxError:
1895 1899 mode = 'exec'
1896 1900 t0 = clock()
1897 1901 code = compile(expr,'<timed exec>',mode)
1898 1902 tc = clock()-t0
1899 1903 # skew measurement as little as possible
1900 1904 glob = self.shell.user_ns
1901 1905 clk = clock2
1902 1906 wtime = time.time
1903 1907 # time execution
1904 1908 wall_st = wtime()
1905 1909 if mode=='eval':
1906 1910 st = clk()
1907 1911 out = eval(code,glob)
1908 1912 end = clk()
1909 1913 else:
1910 1914 st = clk()
1911 1915 exec code in glob
1912 1916 end = clk()
1913 1917 out = None
1914 1918 wall_end = wtime()
1915 1919 # Compute actual times and report
1916 1920 wall_time = wall_end-wall_st
1917 1921 cpu_user = end[0]-st[0]
1918 1922 cpu_sys = end[1]-st[1]
1919 1923 cpu_tot = cpu_user+cpu_sys
1920 1924 print "CPU times: user %.2f s, sys: %.2f s, total: %.2f s" % \
1921 1925 (cpu_user,cpu_sys,cpu_tot)
1922 1926 print "Wall time: %.2f s" % wall_time
1923 1927 if tc > tc_min:
1924 1928 print "Compiler : %.2f s" % tc
1925 1929 return out
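        # Sketch of the wall-clock part done by hand (for comparison only):
        #
        #   import time
        #   t0 = time.time(); result = sum(range(1000000))
        #   print 'Wall time: %.2f s' % (time.time() - t0)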
1926 1930
1927 1931 @testdec.skip_doctest
1928 1932 def magic_macro(self,parameter_s = ''):
1929 1933 """Define a set of input lines as a macro for future re-execution.
1930 1934
1931 1935 Usage:\\
1932 1936 %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ...
1933 1937
1934 1938 Options:
1935 1939
1936 1940 -r: use 'raw' input. By default, the 'processed' history is used,
 1937 1941          so that magics are loaded in their transformed version, as valid
 1938 1942          Python. If this option is given, the raw input as typed at the
 1939 1943          command line is used instead.
1940 1944
1941 1945 This will define a global variable called `name` which is a string
1942 1946 made of joining the slices and lines you specify (n1,n2,... numbers
1943 1947 above) from your input history into a single string. This variable
1944 1948 acts like an automatic function which re-executes those lines as if
1945 1949 you had typed them. You just type 'name' at the prompt and the code
1946 1950 executes.
1947 1951
1948 1952 The notation for indicating number ranges is: n1-n2 means 'use line
1949 1953 numbers n1,...n2' (the endpoint is included). That is, '5-7' means
1950 1954 using the lines numbered 5,6 and 7.
1951 1955
1952 1956 Note: as a 'hidden' feature, you can also use traditional python slice
1953 1957 notation, where N:M means numbers N through M-1.
1954 1958
1955 1959 For example, if your history contains (%hist prints it):
1956 1960
1957 1961 44: x=1
1958 1962 45: y=3
1959 1963 46: z=x+y
1960 1964 47: print x
1961 1965 48: a=5
1962 1966 49: print 'x',x,'y',y
1963 1967
1964 1968 you can create a macro with lines 44 through 47 (included) and line 49
1965 1969 called my_macro with:
1966 1970
1967 1971 In [55]: %macro my_macro 44-47 49
1968 1972
1969 1973 Now, typing `my_macro` (without quotes) will re-execute all this code
1970 1974 in one pass.
1971 1975
1972 1976 You don't need to give the line-numbers in order, and any given line
1973 1977 number can appear multiple times. You can assemble macros with any
1974 1978 lines from your input history in any order.
1975 1979
1976 1980 The macro is a simple object which holds its value in an attribute,
1977 1981 but IPython's display system checks for macros and executes them as
1978 1982 code instead of printing them when you type their name.
1979 1983
1980 1984 You can view a macro's contents by explicitly printing it with:
1981 1985
1982 1986 'print macro_name'.
1983 1987
1984 1988 For one-off cases which DON'T contain magic function calls in them you
1985 1989 can obtain similar results by explicitly executing slices from your
1986 1990 input history with:
1987 1991
1988 1992 In [60]: exec In[44:48]+In[49]"""
1989 1993
1990 1994 opts,args = self.parse_options(parameter_s,'r',mode='list')
1991 1995 if not args:
1992 1996 macs = [k for k,v in self.shell.user_ns.items() if isinstance(v, Macro)]
1993 1997 macs.sort()
1994 1998 return macs
1995 1999 if len(args) == 1:
1996 2000 raise UsageError(
 1997 2001                  "%macro insufficient args; usage: '%macro name n1-n2 n3-n4...'")
1998 2002 name,ranges = args[0], args[1:]
1999 2003
2000 2004 #print 'rng',ranges # dbg
2001 2005 lines = self.extract_input_slices(ranges,opts.has_key('r'))
2002 2006 macro = Macro(lines)
2003 2007 self.shell.user_ns.update({name:macro})
2004 2008 print 'Macro `%s` created. To execute, type its name (without quotes).' % name
2005 2009 print 'Macro contents:'
2006 2010 print macro,
2007 2011
2008 2012 def magic_save(self,parameter_s = ''):
2009 2013 """Save a set of lines to a given filename.
2010 2014
2011 2015 Usage:\\
2012 2016 %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ...
2013 2017
2014 2018 Options:
2015 2019
2016 2020 -r: use 'raw' input. By default, the 'processed' history is used,
 2017 2021          so that magics are loaded in their transformed version, as valid
 2018 2022          Python. If this option is given, the raw input as typed at the
 2019 2023          command line is used instead.
2020 2024
2021 2025 This function uses the same syntax as %macro for line extraction, but
2022 2026 instead of creating a macro it saves the resulting string to the
2023 2027 filename you specify.
2024 2028
2025 2029 It adds a '.py' extension to the file if you don't do so yourself, and
2026 2030 it asks for confirmation before overwriting existing files."""
2027 2031
2028 2032 opts,args = self.parse_options(parameter_s,'r',mode='list')
2029 2033 fname,ranges = args[0], args[1:]
2030 2034 if not fname.endswith('.py'):
2031 2035 fname += '.py'
2032 2036 if os.path.isfile(fname):
2033 2037 ans = raw_input('File `%s` exists. Overwrite (y/[N])? ' % fname)
2034 2038 if ans.lower() not in ['y','yes']:
2035 2039 print 'Operation cancelled.'
2036 2040 return
2037 2041 cmds = ''.join(self.extract_input_slices(ranges,opts.has_key('r')))
2038 2042 f = file(fname,'w')
2039 2043 f.write(cmds)
2040 2044 f.close()
2041 2045 print 'The following commands were written to file `%s`:' % fname
2042 2046 print cmds
2043 2047
2044 2048 def _edit_macro(self,mname,macro):
2045 2049 """open an editor with the macro data in a file"""
2046 2050 filename = self.shell.mktempfile(macro.value)
2047 2051 self.shell.hooks.editor(filename)
2048 2052
2049 2053 # and make a new macro object, to replace the old one
2050 2054 mfile = open(filename)
2051 2055 mvalue = mfile.read()
2052 2056 mfile.close()
2053 2057 self.shell.user_ns[mname] = Macro(mvalue)
2054 2058
2055 2059 def magic_ed(self,parameter_s=''):
2056 2060 """Alias to %edit."""
2057 2061 return self.magic_edit(parameter_s)
2058 2062
2059 2063 @testdec.skip_doctest
2060 2064 def magic_edit(self,parameter_s='',last_call=['','']):
2061 2065 """Bring up an editor and execute the resulting code.
2062 2066
2063 2067 Usage:
2064 2068 %edit [options] [args]
2065 2069
2066 2070 %edit runs IPython's editor hook. The default version of this hook is
2067 2071 set to call the __IPYTHON__.rc.editor command. This is read from your
2068 2072 environment variable $EDITOR. If this isn't found, it will default to
2069 2073 vi under Linux/Unix and to notepad under Windows. See the end of this
2070 2074 docstring for how to change the editor hook.
2071 2075
2072 2076 You can also set the value of this editor via the command line option
2073 2077 '-editor' or in your ipythonrc file. This is useful if you wish to use
 2074 2078          an editor specifically for IPython, different from your typical default
2075 2079 (and for Windows users who typically don't set environment variables).
2076 2080
2077 2081 This command allows you to conveniently edit multi-line code right in
2078 2082 your IPython session.
2079 2083
2080 2084 If called without arguments, %edit opens up an empty editor with a
2081 2085 temporary file and will execute the contents of this file when you
2082 2086 close it (don't forget to save it!).
2083 2087
2084 2088
2085 2089 Options:
2086 2090
2087 2091 -n <number>: open the editor at a specified line number. By default,
2088 2092 the IPython editor hook uses the unix syntax 'editor +N filename', but
2089 2093 you can configure this by providing your own modified hook if your
2090 2094 favorite editor supports line-number specifications with a different
2091 2095 syntax.
2092 2096
2093 2097 -p: this will call the editor with the same data as the previous time
2094 2098 it was used, regardless of how long ago (in your current session) it
2095 2099 was.
2096 2100
2097 2101 -r: use 'raw' input. This option only applies to input taken from the
2098 2102 user's history. By default, the 'processed' history is used, so that
 2099 2103          magics are loaded in their transformed version, as valid Python. If
 2100 2104          this option is given, the raw input as typed at the command line is
2101 2105 used instead. When you exit the editor, it will be executed by
2102 2106 IPython's own processor.
2103 2107
2104 2108 -x: do not execute the edited code immediately upon exit. This is
2105 2109 mainly useful if you are editing programs which need to be called with
2106 2110 command line arguments, which you can then do using %run.
2107 2111
2108 2112
2109 2113 Arguments:
2110 2114
 2111 2115          If arguments are given, the following possibilities exist:
2112 2116
2113 2117 - The arguments are numbers or pairs of colon-separated numbers (like
2114 2118 1 4:8 9). These are interpreted as lines of previous input to be
 2115 2119          loaded into the editor. The syntax is the same as for the %macro command.
2116 2120
2117 2121 - If the argument doesn't start with a number, it is evaluated as a
2118 2122 variable and its contents loaded into the editor. You can thus edit
2119 2123 any string which contains python code (including the result of
2120 2124 previous edits).
2121 2125
2122 2126 - If the argument is the name of an object (other than a string),
2123 2127 IPython will try to locate the file where it was defined and open the
2124 2128 editor at the point where it is defined. You can use `%edit function`
2125 2129 to load an editor exactly at the point where 'function' is defined,
2126 2130 edit it and have the file be executed automatically.
2127 2131
2128 2132 If the object is a macro (see %macro for details), this opens up your
2129 2133 specified editor with a temporary file containing the macro's data.
2130 2134 Upon exit, the macro is reloaded with the contents of the file.
2131 2135
2132 2136 Note: opening at an exact line is only supported under Unix, and some
2133 2137 editors (like kedit and gedit up to Gnome 2.8) do not understand the
2134 2138 '+NUMBER' parameter necessary for this feature. Good editors like
2135 2139 (X)Emacs, vi, jed, pico and joe all do.
2136 2140
2137 2141 - If the argument is not found as a variable, IPython will look for a
2138 2142 file with that name (adding .py if necessary) and load it into the
2139 2143 editor. It will execute its contents with execfile() when you exit,
2140 2144 loading any code in the file into your interactive namespace.
2141 2145
2142 2146 After executing your code, %edit will return as output the code you
2143 2147 typed in the editor (except when it was an existing file). This way
2144 2148 you can reload the code in further invocations of %edit as a variable,
2145 2149 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
2146 2150 the output.
2147 2151
2148 2152 Note that %edit is also available through the alias %ed.
2149 2153
2150 2154 This is an example of creating a simple function inside the editor and
2151 2155 then modifying it. First, start up the editor:
2152 2156
2153 2157 In [1]: ed
2154 2158 Editing... done. Executing edited code...
 2155 2159          Out[1]: 'def foo():\\n    print "foo() was defined in an editing session"\\n'
2156 2160
2157 2161 We can then call the function foo():
2158 2162
2159 2163 In [2]: foo()
2160 2164 foo() was defined in an editing session
2161 2165
2162 2166 Now we edit foo. IPython automatically loads the editor with the
2163 2167 (temporary) file where foo() was previously defined:
2164 2168
2165 2169 In [3]: ed foo
2166 2170 Editing... done. Executing edited code...
2167 2171
2168 2172 And if we call foo() again we get the modified version:
2169 2173
2170 2174 In [4]: foo()
2171 2175 foo() has now been changed!
2172 2176
2173 2177 Here is an example of how to edit a code snippet successive
2174 2178 times. First we call the editor:
2175 2179
2176 2180 In [5]: ed
2177 2181 Editing... done. Executing edited code...
2178 2182 hello
 2179 2183          Out[5]: "print 'hello'\\n"
2180 2184
2181 2185 Now we call it again with the previous output (stored in _):
2182 2186
2183 2187 In [6]: ed _
2184 2188 Editing... done. Executing edited code...
2185 2189 hello world
 2186 2190          Out[6]: "print 'hello world'\\n"
2187 2191
2188 2192 Now we call it with the output #8 (stored in _8, also as Out[8]):
2189 2193
2190 2194 In [7]: ed _8
2191 2195 Editing... done. Executing edited code...
2192 2196 hello again
 2193 2197          Out[7]: "print 'hello again'\\n"
2194 2198
2195 2199
2196 2200 Changing the default editor hook:
2197 2201
2198 2202 If you wish to write your own editor hook, you can put it in a
2199 2203 configuration file which you load at startup time. The default hook
2200 2204 is defined in the IPython.hooks module, and you can use that as a
2201 2205 starting example for further modifications. That file also has
2202 2206 general instructions on how to set a new hook for use once you've
2203 2207 defined it."""
2204 2208
2205 2209 # FIXME: This function has become a convoluted mess. It needs a
2206 2210 # ground-up rewrite with clean, simple logic.
2207 2211
2208 2212 def make_filename(arg):
2209 2213 "Make a filename from the given args"
2210 2214 try:
2211 2215 filename = get_py_filename(arg)
2212 2216 except IOError:
 2213 2217                  if arg.endswith('.py'):
2214 2218 filename = arg
2215 2219 else:
2216 2220 filename = None
2217 2221 return filename
2218 2222
2219 2223 # custom exceptions
2220 2224 class DataIsObject(Exception): pass
2221 2225
2222 2226 opts,args = self.parse_options(parameter_s,'prxn:')
2223 2227 # Set a few locals from the options for convenience:
2224 2228 opts_p = opts.has_key('p')
2225 2229 opts_r = opts.has_key('r')
2226 2230
2227 2231 # Default line number value
2228 2232 lineno = opts.get('n',None)
2229 2233
2230 2234 if opts_p:
2231 2235 args = '_%s' % last_call[0]
2232 2236 if not self.shell.user_ns.has_key(args):
2233 2237 args = last_call[1]
2234 2238
2235 2239 # use last_call to remember the state of the previous call, but don't
2236 2240 # let it be clobbered by successive '-p' calls.
2237 2241 try:
2238 2242 last_call[0] = self.shell.outputcache.prompt_count
2239 2243 if not opts_p:
2240 2244 last_call[1] = parameter_s
2241 2245 except:
2242 2246 pass
2243 2247
2244 2248 # by default this is done with temp files, except when the given
2245 2249 # arg is a filename
2246 2250 use_temp = 1
2247 2251
2248 2252 if re.match(r'\d',args):
2249 2253 # Mode where user specifies ranges of lines, like in %macro.
2250 2254 # This means that you can't edit files whose names begin with
2251 2255 # numbers this way. Tough.
2252 2256 ranges = args.split()
2253 2257 data = ''.join(self.extract_input_slices(ranges,opts_r))
2254 2258 elif args.endswith('.py'):
2255 2259 filename = make_filename(args)
2256 2260 data = ''
2257 2261 use_temp = 0
2258 2262 elif args:
2259 2263 try:
2260 2264 # Load the parameter given as a variable. If not a string,
2261 2265 # process it as an object instead (below)
2262 2266
2263 2267 #print '*** args',args,'type',type(args) # dbg
2264 2268 data = eval(args,self.shell.user_ns)
2265 2269 if not type(data) in StringTypes:
2266 2270 raise DataIsObject
2267 2271
2268 2272 except (NameError,SyntaxError):
2269 2273 # given argument is not a variable, try as a filename
2270 2274 filename = make_filename(args)
2271 2275 if filename is None:
2272 2276 warn("Argument given (%s) can't be found as a variable "
2273 2277 "or as a filename." % args)
2274 2278 return
2275 2279
2276 2280 data = ''
2277 2281 use_temp = 0
2278 2282 except DataIsObject:
2279 2283
2280 2284 # macros have a special edit function
2281 2285 if isinstance(data,Macro):
2282 2286 self._edit_macro(args,data)
2283 2287 return
2284 2288
2285 2289 # For objects, try to edit the file where they are defined
2286 2290 try:
2287 2291 filename = inspect.getabsfile(data)
2288 2292 if 'fakemodule' in filename.lower() and inspect.isclass(data):
2289 2293 # class created by %edit? Try to find source
2290 2294 # by looking for method definitions instead, the
2291 2295 # __module__ in those classes is FakeModule.
2292 2296 attrs = [getattr(data, aname) for aname in dir(data)]
2293 2297 for attr in attrs:
2294 2298 if not inspect.ismethod(attr):
2295 2299 continue
2296 2300 filename = inspect.getabsfile(attr)
2297 2301 if filename and 'fakemodule' not in filename.lower():
2298 2302 # change the attribute to be the edit target instead
2299 2303 data = attr
2300 2304 break
2301 2305
2302 2306 datafile = 1
2303 2307 except TypeError:
2304 2308 filename = make_filename(args)
2305 2309 datafile = 1
2306 2310 warn('Could not find file where `%s` is defined.\n'
2307 2311 'Opening a file named `%s`' % (args,filename))
2308 2312 # Now, make sure we can actually read the source (if it was in
2309 2313 # a temp file it's gone by now).
2310 2314 if datafile:
2311 2315 try:
2312 2316 if lineno is None:
2313 2317 lineno = inspect.getsourcelines(data)[1]
2314 2318 except IOError:
2315 2319 filename = make_filename(args)
2316 2320 if filename is None:
2317 2321 warn('The file `%s` where `%s` was defined cannot '
2318 2322 'be read.' % (filename,data))
2319 2323 return
2320 2324 use_temp = 0
2321 2325 else:
2322 2326 data = ''
2323 2327
2324 2328 if use_temp:
2325 2329 filename = self.shell.mktempfile(data)
2326 2330 print 'IPython will make a temporary file named:',filename
2327 2331
2328 2332 # do actual editing here
2329 2333 print 'Editing...',
2330 2334 sys.stdout.flush()
2331 self.shell.hooks.editor(filename,lineno)
2335 try:
2336 self.shell.hooks.editor(filename,lineno)
2337 except IPython.ipapi.TryNext:
2338 warn('Could not open editor')
2339 return
2340
2341 # XXX TODO: should this be generalized for all string vars?
2342 # For now, this is special-cased to blocks created by cpaste
2343 if args.strip() == 'pasted_block':
2344 self.shell.user_ns['pasted_block'] = file_read(filename)
2345
2332 2346 if opts.has_key('x'): # -x prevents actual execution
2333 2347 print
2334 2348 else:
2335 2349 print 'done. Executing edited code...'
2336 2350 if opts_r:
2337 2351 self.shell.runlines(file_read(filename))
2338 2352 else:
2339 2353 self.shell.safe_execfile(filename,self.shell.user_ns,
2340 2354 self.shell.user_ns)
2355
2356
2341 2357 if use_temp:
2342 2358 try:
2343 2359 return open(filename).read()
2344 2360 except IOError,msg:
2345 2361 if msg.filename == filename:
2346 2362 warn('File not found. Did you forget to save?')
2347 2363 return
2348 2364 else:
2349 2365 self.shell.showtraceback()
2350 2366
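# A minimal sketch of the custom "editor hook" described in the %edit
# docstring above. It assumes the classic IPython.ipapi interface of this
# era; my_editor_hook is a hypothetical name and 'jed' is only a placeholder
# editor command.
import os
import IPython.ipapi

def my_editor_hook(self, filename, linenum=None):
    """Open an external editor on filename, optionally jumping to linenum."""
    if linenum is None:
        linenum = 0
    # Raising TryNext lets IPython fall back to the next registered hook,
    # which is exactly what the %edit code above catches and reports.
    if os.system('jed +%d %s' % (linenum, filename)) != 0:
        raise IPython.ipapi.TryNext()

IPython.ipapi.get().set_hook('editor', my_editor_hook)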
2351 2367 def magic_xmode(self,parameter_s = ''):
2352 2368 """Switch modes for the exception handlers.
2353 2369
2354 2370 Valid modes: Plain, Context and Verbose.
2355 2371
2356 2372 If called without arguments, acts as a toggle."""
2357 2373
2358 2374 def xmode_switch_err(name):
2359 2375 warn('Error changing %s exception modes.\n%s' %
2360 2376 (name,sys.exc_info()[1]))
2361 2377
2362 2378 shell = self.shell
2363 2379 new_mode = parameter_s.strip().capitalize()
2364 2380 try:
2365 2381 shell.InteractiveTB.set_mode(mode=new_mode)
2366 2382 print 'Exception reporting mode:',shell.InteractiveTB.mode
2367 2383 except:
2368 2384 xmode_switch_err('user')
2369 2385
2370 2386 # threaded shells use a special handler in sys.excepthook
2371 2387 if shell.isthreaded:
2372 2388 try:
2373 2389 shell.sys_excepthook.set_mode(mode=new_mode)
2374 2390 except:
2375 2391 xmode_switch_err('threaded')
2376 2392
2377 2393 def magic_colors(self,parameter_s = ''):
2378 2394 """Switch color scheme for prompts, info system and exception handlers.
2379 2395
2380 2396 Currently implemented schemes: NoColor, Linux, LightBG.
2381 2397
2382 2398 Color scheme names are not case-sensitive."""
2383 2399
2384 2400 def color_switch_err(name):
2385 2401 warn('Error changing %s color schemes.\n%s' %
2386 2402 (name,sys.exc_info()[1]))
2387 2403
2388 2404
2389 2405 new_scheme = parameter_s.strip()
2390 2406 if not new_scheme:
2391 2407 raise UsageError(
2392 2408 "%colors: you must specify a color scheme. See '%colors?'")
2393 2409 return
2394 2410 # local shortcut
2395 2411 shell = self.shell
2396 2412
2397 2413 import IPython.rlineimpl as readline
2398 2414
2399 2415 if not readline.have_readline and sys.platform == "win32":
2400 2416 msg = """\
2401 2417 Proper color support under MS Windows requires the pyreadline library.
2402 2418 You can find it at:
2403 2419 http://ipython.scipy.org/moin/PyReadline/Intro
2404 2420 Gary's readline needs the ctypes module, from:
2405 2421 http://starship.python.net/crew/theller/ctypes
2406 2422 (Note that ctypes is already part of Python versions 2.5 and newer).
2407 2423
2408 2424 Defaulting color scheme to 'NoColor'"""
2409 2425 new_scheme = 'NoColor'
2410 2426 warn(msg)
2411 2427
2412 2428 # readline option is 0
2413 2429 if not shell.has_readline:
2414 2430 new_scheme = 'NoColor'
2415 2431
2416 2432 # Set prompt colors
2417 2433 try:
2418 2434 shell.outputcache.set_colors(new_scheme)
2419 2435 except:
2420 2436 color_switch_err('prompt')
2421 2437 else:
2422 2438 shell.rc.colors = \
2423 2439 shell.outputcache.color_table.active_scheme_name
2424 2440 # Set exception colors
2425 2441 try:
2426 2442 shell.InteractiveTB.set_colors(scheme = new_scheme)
2427 2443 shell.SyntaxTB.set_colors(scheme = new_scheme)
2428 2444 except:
2429 2445 color_switch_err('exception')
2430 2446
2431 2447 # threaded shells use a verbose traceback in sys.excepthook
2432 2448 if shell.isthreaded:
2433 2449 try:
2434 2450 shell.sys_excepthook.set_colors(scheme=new_scheme)
2435 2451 except:
2436 2452 color_switch_err('system exception handler')
2437 2453
2438 2454 # Set info (for 'object?') colors
2439 2455 if shell.rc.color_info:
2440 2456 try:
2441 2457 shell.inspector.set_active_scheme(new_scheme)
2442 2458 except:
2443 2459 color_switch_err('object inspector')
2444 2460 else:
2445 2461 shell.inspector.set_active_scheme('NoColor')
2446 2462
2447 2463 def magic_color_info(self,parameter_s = ''):
2448 2464 """Toggle color_info.
2449 2465
2450 2466 The color_info configuration parameter controls whether colors are
2451 2467 used for displaying object details (by things like %psource, %pfile or
2452 2468 the '?' system). This function toggles this value with each call.
2453 2469
2454 2470 Note that unless you have a fairly recent pager (less works better
2455 2471 than more) in your system, using colored object information displays
2456 2472 will not work properly. Test it and see."""
2457 2473
2458 2474 self.shell.rc.color_info = 1 - self.shell.rc.color_info
2459 2475 self.magic_colors(self.shell.rc.colors)
2460 2476 print 'Object introspection functions now have coloring:',
2461 2477 print ['OFF','ON'][self.shell.rc.color_info]
2462 2478
2463 2479 def magic_Pprint(self, parameter_s=''):
2464 2480 """Toggle pretty printing on/off."""
2465 2481
2466 2482 self.shell.rc.pprint = 1 - self.shell.rc.pprint
2467 2483 print 'Pretty printing has been turned', \
2468 2484 ['OFF','ON'][self.shell.rc.pprint]
2469 2485
2470 2486 def magic_exit(self, parameter_s=''):
2471 2487 """Exit IPython, confirming if configured to do so.
2472 2488
2473 2489 You can configure whether IPython asks for confirmation upon exit by
2474 2490 setting the confirm_exit flag in the ipythonrc file."""
2475 2491
2476 2492 self.shell.exit()
2477 2493
2478 2494 def magic_quit(self, parameter_s=''):
2479 2495 """Exit IPython, confirming if configured to do so (like %exit)"""
2480 2496
2481 2497 self.shell.exit()
2482 2498
2483 2499 def magic_Exit(self, parameter_s=''):
2484 2500 """Exit IPython without confirmation."""
2485 2501
2486 2502 self.shell.ask_exit()
2487 2503
2488 2504 #......................................................................
2489 2505 # Functions to implement unix shell-type things
2490 2506
2491 2507 @testdec.skip_doctest
2492 2508 def magic_alias(self, parameter_s = ''):
2493 2509 """Define an alias for a system command.
2494 2510
2495 2511 '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'
2496 2512
2497 2513 Then, typing 'alias_name params' will execute the system command 'cmd
2498 2514 params' (from your underlying operating system).
2499 2515
2500 2516 Aliases have lower precedence than magic functions and Python normal
2501 2517 variables, so if 'foo' is both a Python variable and an alias, the
2502 2518 alias can not be executed until 'del foo' removes the Python variable.
2503 2519
2504 2520 You can use the %l specifier in an alias definition to represent the
2505 2521 whole line when the alias is called. For example:
2506 2522
2507 2523 In [2]: alias all echo "Input in brackets: <%l>"
2508 2524 In [3]: all hello world
2509 2525 Input in brackets: <hello world>
2510 2526
2511 2527 You can also define aliases with parameters using %s specifiers (one
2512 2528 per parameter):
2513 2529
2514 2530 In [1]: alias parts echo first %s second %s
2515 2531 In [2]: %parts A B
2516 2532 first A second B
2517 2533 In [3]: %parts A
2518 2534 Incorrect number of arguments: 2 expected.
2519 2535 parts is an alias to: 'echo first %s second %s'
2520 2536
2521 2537 Note that %l and %s are mutually exclusive. You can only use one or
2522 2538 the other in your aliases.
2523 2539
2524 2540 Aliases expand Python variables just like system calls using ! or !!
2525 2541 do: all expressions prefixed with '$' get expanded. For details of
2526 2542 the semantic rules, see PEP-215:
2527 2543 http://www.python.org/peps/pep-0215.html. This is the library used by
2528 2544 IPython for variable expansion. If you want to access a true shell
2529 2545 variable, an extra $ is necessary to prevent its expansion by IPython:
2530 2546
2531 2547 In [6]: alias show echo
2532 2548 In [7]: PATH='A Python string'
2533 2549 In [8]: show $PATH
2534 2550 A Python string
2535 2551 In [9]: show $$PATH
2536 2552 /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:...
2537 2553
2538 2554 You can use the alias facility to access all of $PATH. See the %rehash
2539 2555 and %rehashx functions, which automatically create aliases for the
2540 2556 contents of your $PATH.
2541 2557
2542 2558 If called with no parameters, %alias prints the current alias table."""
2543 2559
2544 2560 par = parameter_s.strip()
2545 2561 if not par:
2546 2562 stored = self.db.get('stored_aliases', {} )
2547 2563 atab = self.shell.alias_table
2548 2564 aliases = atab.keys()
2549 2565 aliases.sort()
2550 2566 res = []
2551 2567 showlast = []
2552 2568 for alias in aliases:
2553 2569 special = False
2554 2570 try:
2555 2571 tgt = atab[alias][1]
2556 2572 except (TypeError, AttributeError):
2557 2573 # unsubscriptable? probably a callable
2558 2574 tgt = atab[alias]
2559 2575 special = True
2560 2576 # 'interesting' aliases
2561 2577 if (alias in stored or
2562 2578 special or
2563 2579 alias.lower() != os.path.splitext(tgt)[0].lower() or
2564 2580 ' ' in tgt):
2565 2581 showlast.append((alias, tgt))
2566 2582 else:
2567 2583 res.append((alias, tgt ))
2568 2584
2569 2585 # show most interesting aliases last
2570 2586 res.extend(showlast)
2571 2587 print "Total number of aliases:",len(aliases)
2572 2588 return res
2573 2589 try:
2574 2590 alias,cmd = par.split(None,1)
2575 2591 except:
2576 2592 print OInspect.getdoc(self.magic_alias)
2577 2593 else:
2578 2594 nargs = cmd.count('%s')
2579 2595 if nargs>0 and cmd.find('%l')>=0:
2580 2596 error('The %s and %l specifiers are mutually exclusive '
2581 2597 'in alias definitions.')
2582 2598 else: # all looks OK
2583 2599 self.shell.alias_table[alias] = (nargs,cmd)
2584 2600 self.shell.alias_table_validate(verbose=0)
2585 2601 # end magic_alias
2586 2602
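# Illustration of the alias-table entry format used above: each alias maps to
# (nargs, cmd), where nargs counts the '%s' slots in cmd. expand_alias is a
# hypothetical helper for illustration, not IPython's own input transformer.
def expand_alias(entry, argline):
    nargs, cmd = entry
    if nargs == 0:
        # No positional slots: fill %l with the whole line, or just append it.
        if '%l' in cmd:
            return cmd.replace('%l', argline)
        return ('%s %s' % (cmd, argline)).strip()
    args = argline.split(None, nargs - 1)
    if len(args) != nargs:
        raise ValueError('Incorrect number of arguments: %d expected.' % nargs)
    return cmd % tuple(args)

# expand_alias((2, 'echo first %s second %s'), 'A B') -> 'echo first A second B'
# expand_alias((0, 'echo "Input in brackets: <%l>"'), 'hello world')
#   -> 'echo "Input in brackets: <hello world>"'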
2587 2603 def magic_unalias(self, parameter_s = ''):
2588 2604 """Remove an alias"""
2589 2605
2590 2606 aname = parameter_s.strip()
2591 2607 if aname in self.shell.alias_table:
2592 2608 del self.shell.alias_table[aname]
2593 2609 stored = self.db.get('stored_aliases', {} )
2594 2610 if aname in stored:
2595 2611 print "Removing %stored alias",aname
2596 2612 del stored[aname]
2597 2613 self.db['stored_aliases'] = stored
2598 2614
2599 2615
2600 2616 def magic_rehashx(self, parameter_s = ''):
2601 2617 """Update the alias table with all executable files in $PATH.
2602 2618
2603 2619 This version explicitly checks that every entry in $PATH is a file
2604 2620 with execute access (os.X_OK), so it is much slower than %rehash.
2605 2621
2606 2622 Under Windows, it checks executability as a match against a
2607 2623 '|'-separated string of extensions, stored in the IPython config
2608 2624 variable win_exec_ext. This defaults to 'exe|com|bat'.
2609 2625
2610 2626 This function also resets the root module cache of the module
2611 2627 completer, which is used on slow filesystems.
2612 2628 """
2613 2629
2614 2630
2615 2631 ip = self.api
2616 2632
2617 2633 # for the benefit of module completer in ipy_completers.py
2618 2634 del ip.db['rootmodules']
2619 2635
2620 2636 path = [os.path.abspath(os.path.expanduser(p)) for p in
2621 2637 os.environ.get('PATH','').split(os.pathsep)]
2622 2638 path = filter(os.path.isdir,path)
2623 2639
2624 2640 alias_table = self.shell.alias_table
2625 2641 syscmdlist = []
2626 2642 if os.name == 'posix':
2627 2643 isexec = lambda fname:os.path.isfile(fname) and \
2628 2644 os.access(fname,os.X_OK)
2629 2645 else:
2630 2646
2631 2647 try:
2632 2648 winext = os.environ['pathext'].replace(';','|').replace('.','')
2633 2649 except KeyError:
2634 2650 winext = 'exe|com|bat|py'
2635 2651 if 'py' not in winext:
2636 2652 winext += '|py'
2637 2653 execre = re.compile(r'(.*)\.(%s)$' % winext,re.IGNORECASE)
2638 2654 isexec = lambda fname:os.path.isfile(fname) and execre.match(fname)
2639 2655 savedir = os.getcwd()
2640 2656 try:
2641 2657 # write the whole loop for posix/Windows so we don't have an if in
2642 2658 # the innermost part
2643 2659 if os.name == 'posix':
2644 2660 for pdir in path:
2645 2661 os.chdir(pdir)
2646 2662 for ff in os.listdir(pdir):
2647 2663 if isexec(ff) and ff not in self.shell.no_alias:
2648 2664 # each entry in the alias table must be (N,name),
2649 2665 # where N is the number of positional arguments of the
2650 # alias.
2651 alias_table[ff] = (0,ff)
2666 # alias.
2667 # Dots will be removed from alias names, since ipython
2668 # assumes names with dots to be python code
2669 alias_table[ff.replace('.','')] = (0,ff)
2652 2670 syscmdlist.append(ff)
2653 2671 else:
2654 2672 for pdir in path:
2655 2673 os.chdir(pdir)
2656 2674 for ff in os.listdir(pdir):
2657 2675 base, ext = os.path.splitext(ff)
2658 2676 if isexec(ff) and base.lower() not in self.shell.no_alias:
2659 2677 if ext.lower() == '.exe':
2660 2678 ff = base
2661 alias_table[base.lower()] = (0,ff)
2679 alias_table[base.lower().replace('.','')] = (0,ff)
2662 2680 syscmdlist.append(ff)
2663 2681 # Make sure the alias table doesn't contain keywords or builtins
2664 2682 self.shell.alias_table_validate()
2665 2683 # Call again init_auto_alias() so we get 'rm -i' and other
2666 2684 # modified aliases since %rehashx will probably clobber them
2667 2685
2668 2686 # no, we don't want them. if %rehashx clobbers them, good,
2669 2687 # we'll probably get better versions
2670 2688 # self.shell.init_auto_alias()
2671 2689 db = ip.db
2672 2690 db['syscmdlist'] = syscmdlist
2673 2691 finally:
2674 2692 os.chdir(savedir)
2675 2693
2676 2694 def magic_pwd(self, parameter_s = ''):
2677 2695 """Return the current working directory path."""
2678 2696 return os.getcwd()
2679 2697
2680 2698 def magic_cd(self, parameter_s=''):
2681 2699 """Change the current working directory.
2682 2700
2683 2701 This command automatically maintains an internal list of directories
2684 2702 you visit during your IPython session, in the variable _dh. The
2685 2703 command %dhist shows this history nicely formatted. You can also
2686 2704 do 'cd -<tab>' to see directory history conveniently.
2687 2705
2688 2706 Usage:
2689 2707
2690 2708 cd 'dir': changes to directory 'dir'.
2691 2709
2692 2710 cd -: changes to the last visited directory.
2693 2711
2694 2712 cd -<n>: changes to the n-th directory in the directory history.
2695 2713
2696 2714 cd --foo: change to directory that matches 'foo' in history
2697 2715
2698 2716 cd -b <bookmark_name>: jump to a bookmark set by %bookmark
2699 2717 (note: cd <bookmark_name> is enough if there is no
2700 2718 directory <bookmark_name>, but a bookmark with the name exists.)
2701 2719 'cd -b <tab>' allows you to tab-complete bookmark names.
2702 2720
2703 2721 Options:
2704 2722
2705 2723 -q: quiet. Do not print the working directory after the cd command is
2706 2724 executed. By default IPython's cd command does print this directory,
2707 2725 since the default prompts do not display path information.
2708 2726
2709 2727 Note that !cd doesn't work for this purpose because the shell where
2710 2728 !command runs is immediately discarded after executing 'command'."""
2711 2729
2712 2730 parameter_s = parameter_s.strip()
2713 2731 #bkms = self.shell.persist.get("bookmarks",{})
2714 2732
2715 2733 oldcwd = os.getcwd()
2716 2734 numcd = re.match(r'(-)(\d+)$',parameter_s)
2717 2735 # jump in directory history by number
2718 2736 if numcd:
2719 2737 nn = int(numcd.group(2))
2720 2738 try:
2721 2739 ps = self.shell.user_ns['_dh'][nn]
2722 2740 except IndexError:
2723 2741 print 'The requested directory does not exist in history.'
2724 2742 return
2725 2743 else:
2726 2744 opts = {}
2727 2745 elif parameter_s.startswith('--'):
2728 2746 ps = None
2729 2747 fallback = None
2730 2748 pat = parameter_s[2:]
2731 2749 dh = self.shell.user_ns['_dh']
2732 2750 # first search only by basename (last component)
2733 2751 for ent in reversed(dh):
2734 2752 if pat in os.path.basename(ent) and os.path.isdir(ent):
2735 2753 ps = ent
2736 2754 break
2737 2755
2738 2756 if fallback is None and pat in ent and os.path.isdir(ent):
2739 2757 fallback = ent
2740 2758
2741 2759 # if we have no last part match, pick the first full path match
2742 2760 if ps is None:
2743 2761 ps = fallback
2744 2762
2745 2763 if ps is None:
2746 2764 print "No matching entry in directory history"
2747 2765 return
2748 2766 else:
2749 2767 opts = {}
2750 2768
2751 2769
2752 2770 else:
2753 2771 #turn all non-space-escaping backslashes to slashes,
2754 2772 # for c:\windows\directory\names\
2755 2773 parameter_s = re.sub(r'\\(?! )','/', parameter_s)
2756 2774 opts,ps = self.parse_options(parameter_s,'qb',mode='string')
2757 2775 # jump to previous
2758 2776 if ps == '-':
2759 2777 try:
2760 2778 ps = self.shell.user_ns['_dh'][-2]
2761 2779 except IndexError:
2762 2780 raise UsageError('%cd -: No previous directory to change to.')
2763 2781 # jump to bookmark if needed
2764 2782 else:
2765 2783 if not os.path.isdir(ps) or opts.has_key('b'):
2766 2784 bkms = self.db.get('bookmarks', {})
2767 2785
2768 2786 if bkms.has_key(ps):
2769 2787 target = bkms[ps]
2770 2788 print '(bookmark:%s) -> %s' % (ps,target)
2771 2789 ps = target
2772 2790 else:
2773 2791 if opts.has_key('b'):
2774 2792 raise UsageError("Bookmark '%s' not found. "
2775 2793 "Use '%%bookmark -l' to see your bookmarks." % ps)
2776 2794
2777 2795 # at this point ps should point to the target dir
2778 2796 if ps:
2779 2797 try:
2780 2798 os.chdir(os.path.expanduser(ps))
2781 2799 if self.shell.rc.term_title:
2782 2800 #print 'set term title:',self.shell.rc.term_title # dbg
2783 2801 platutils.set_term_title('IPy ' + abbrev_cwd())
2784 2802 except OSError:
2785 2803 print sys.exc_info()[1]
2786 2804 else:
2787 2805 cwd = os.getcwd()
2788 2806 dhist = self.shell.user_ns['_dh']
2789 2807 if oldcwd != cwd:
2790 2808 dhist.append(cwd)
2791 2809 self.db['dhist'] = compress_dhist(dhist)[-100:]
2792 2810
2793 2811 else:
2794 2812 os.chdir(self.shell.home_dir)
2795 2813 if self.shell.rc.term_title:
2796 2814 platutils.set_term_title("IPy ~")
2797 2815 cwd = os.getcwd()
2798 2816 dhist = self.shell.user_ns['_dh']
2799 2817
2800 2818 if oldcwd != cwd:
2801 2819 dhist.append(cwd)
2802 2820 self.db['dhist'] = compress_dhist(dhist)[-100:]
2803 2821 if not 'q' in opts and self.shell.user_ns['_dh']:
2804 2822 print self.shell.user_ns['_dh'][-1]
2805 2823
2806 2824
2807 2825 def magic_env(self, parameter_s=''):
2808 2826 """List environment variables."""
2809 2827
2810 2828 return os.environ.data
2811 2829
2812 2830 def magic_pushd(self, parameter_s=''):
2813 2831 """Place the current dir on stack and change directory.
2814 2832
2815 2833 Usage:\\
2816 2834 %pushd ['dirname']
2817 2835 """
2818 2836
2819 2837 dir_s = self.shell.dir_stack
2820 2838 tgt = os.path.expanduser(parameter_s)
2821 2839 cwd = os.getcwd().replace(self.home_dir,'~')
2822 2840 if tgt:
2823 2841 self.magic_cd(parameter_s)
2824 2842 dir_s.insert(0,cwd)
2825 2843 return self.magic_dirs()
2826 2844
2827 2845 def magic_popd(self, parameter_s=''):
2828 2846 """Change to directory popped off the top of the stack.
2829 2847 """
2830 2848 if not self.shell.dir_stack:
2831 2849 raise UsageError("%popd on empty stack")
2832 2850 top = self.shell.dir_stack.pop(0)
2833 2851 self.magic_cd(top)
2834 2852 print "popd ->",top
2835 2853
2836 2854 def magic_dirs(self, parameter_s=''):
2837 2855 """Return the current directory stack."""
2838 2856
2839 2857 return self.shell.dir_stack
2840 2858
2841 2859 def magic_dhist(self, parameter_s=''):
2842 2860 """Print your history of visited directories.
2843 2861
2844 2862 %dhist -> print full history\\
2845 2863 %dhist n -> print last n entries only\\
2846 2864 %dhist n1 n2 -> print entries between n1 and n2 (n1 not included)\\
2847 2865
2848 2866 This history is automatically maintained by the %cd command, and
2849 2867 always available as the global list variable _dh. You can use %cd -<n>
2850 2868 to go to directory number <n>.
2851 2869
2852 2870 Note that most of the time, you should view directory history by entering
2853 2871 cd -<TAB>.
2854 2872
2855 2873 """
2856 2874
2857 2875 dh = self.shell.user_ns['_dh']
2858 2876 if parameter_s:
2859 2877 try:
2860 2878 args = map(int,parameter_s.split())
2861 2879 except:
2862 2880 self.arg_err(Magic.magic_dhist)
2863 2881 return
2864 2882 if len(args) == 1:
2865 2883 ini,fin = max(len(dh)-(args[0]),0),len(dh)
2866 2884 elif len(args) == 2:
2867 2885 ini,fin = args
2868 2886 else:
2869 2887 self.arg_err(Magic.magic_dhist)
2870 2888 return
2871 2889 else:
2872 2890 ini,fin = 0,len(dh)
2873 2891 nlprint(dh,
2874 2892 header = 'Directory history (kept in _dh)',
2875 2893 start=ini,stop=fin)
2876 2894
2877 2895 @testdec.skip_doctest
2878 2896 def magic_sc(self, parameter_s=''):
2879 2897 """Shell capture - execute a shell command and capture its output.
2880 2898
2881 2899 DEPRECATED. Suboptimal, retained for backwards compatibility.
2882 2900
2883 2901 You should use the form 'var = !command' instead. Example:
2884 2902
2885 2903 "%sc -l myfiles = ls ~" should now be written as
2886 2904
2887 2905 "myfiles = !ls ~"
2888 2906
2889 2907 myfiles.s, myfiles.l and myfiles.n still apply as documented
2890 2908 below.
2891 2909
2892 2910 --
2893 2911 %sc [options] varname=command
2894 2912
2895 2913 IPython will run the given command using commands.getoutput(), and
2896 2914 will then update the user's interactive namespace with a variable
2897 2915 called varname, containing the value of the call. Your command can
2898 2916 contain shell wildcards, pipes, etc.
2899 2917
2900 2918 The '=' sign in the syntax is mandatory, and the variable name you
2901 2919 supply must follow Python's standard conventions for valid names.
2902 2920
2903 2921 (A special format without variable name exists for internal use)
2904 2922
2905 2923 Options:
2906 2924
2907 2925 -l: list output. Split the output on newlines into a list before
2908 2926 assigning it to the given variable. By default the output is stored
2909 2927 as a single string.
2910 2928
2911 2929 -v: verbose. Print the contents of the variable.
2912 2930
2913 2931 In most cases you should not need to split as a list, because the
2914 2932 returned value is a special type of string which can automatically
2915 2933 provide its contents either as a list (split on newlines) or as a
2916 2934 space-separated string. These are convenient, respectively, either
2917 2935 for sequential processing or to be passed to a shell command.
2918 2936
2919 2937 For example:
2920 2938
2921 2939 # all-random
2922 2940
2923 2941 # Capture into variable a
2924 2942 In [1]: sc a=ls *py
2925 2943
2926 2944 # a is a string with embedded newlines
2927 2945 In [2]: a
2928 2946 Out[2]: 'setup.py\\nwin32_manual_post_install.py'
2929 2947
2930 2948 # which can be seen as a list:
2931 2949 In [3]: a.l
2932 2950 Out[3]: ['setup.py', 'win32_manual_post_install.py']
2933 2951
2934 2952 # or as a whitespace-separated string:
2935 2953 In [4]: a.s
2936 2954 Out[4]: 'setup.py win32_manual_post_install.py'
2937 2955
2938 2956 # a.s is useful to pass as a single command line:
2939 2957 In [5]: !wc -l $a.s
2940 2958 146 setup.py
2941 2959 130 win32_manual_post_install.py
2942 2960 276 total
2943 2961
2944 2962 # while the list form is useful to loop over:
2945 2963 In [6]: for f in a.l:
2946 2964 ...: !wc -l $f
2947 2965 ...:
2948 2966 146 setup.py
2949 2967 130 win32_manual_post_install.py
2950 2968
2951 2969 Similarly, the lists returned by the -l option are also special, in
2952 2970 the sense that you can equally invoke the .s attribute on them to
2953 2971 automatically get a whitespace-separated string from their contents:
2954 2972
2955 2973 In [7]: sc -l b=ls *py
2956 2974
2957 2975 In [8]: b
2958 2976 Out[8]: ['setup.py', 'win32_manual_post_install.py']
2959 2977
2960 2978 In [9]: b.s
2961 2979 Out[9]: 'setup.py win32_manual_post_install.py'
2962 2980
2963 2981 In summary, both the lists and strings used for output capture have
2964 2982 the following special attributes:
2965 2983
2966 2984 .l (or .list) : value as list.
2967 2985 .n (or .nlstr): value as newline-separated string.
2968 2986 .s (or .spstr): value as space-separated string.
2969 2987 """
2970 2988
2971 2989 opts,args = self.parse_options(parameter_s,'lv')
2972 2990 # Try to get a variable name and command to run
2973 2991 try:
2974 2992 # the variable name must be obtained from the parse_options
2975 2993 # output, which uses shlex.split to strip options out.
2976 2994 var,_ = args.split('=',1)
2977 2995 var = var.strip()
2978 2996 # But the command has to be extracted from the original input
2979 2997 # parameter_s, not on what parse_options returns, to avoid the
2980 2998 # quote stripping which shlex.split performs on it.
2981 2999 _,cmd = parameter_s.split('=',1)
2982 3000 except ValueError:
2983 3001 var,cmd = '',''
2984 3002 # If all looks ok, proceed
2985 3003 out,err = self.shell.getoutputerror(cmd)
2986 3004 if err:
2987 3005 print >> Term.cerr,err
2988 3006 if opts.has_key('l'):
2989 3007 out = SList(out.split('\n'))
2990 3008 else:
2991 3009 out = LSString(out)
2992 3010 if opts.has_key('v'):
2993 3011 print '%s ==\n%s' % (var,pformat(out))
2994 3012 if var:
2995 3013 self.shell.user_ns.update({var:out})
2996 3014 else:
2997 3015 return out
2998 3016
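# A minimal sketch of the "special string" behavior described in the %sc
# docstring above. The real LSString and SList classes live in
# IPython.genutils; LSStringSketch is an illustrative name only.
class LSStringSketch(str):
    """String that also exposes its value as a list and as other joins."""
    def get_list(self):
        return self.split('\n')
    def get_spstr(self):
        return self.replace('\n', ' ')
    def get_nlstr(self):
        return str(self)
    l = list = property(get_list)
    s = spstr = property(get_spstr)
    n = nlstr = property(get_nlstr)

# out = LSStringSketch('setup.py\nwin32_manual_post_install.py')
# out.l -> ['setup.py', 'win32_manual_post_install.py']
# out.s -> 'setup.py win32_manual_post_install.py'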
2999 3017 def magic_sx(self, parameter_s=''):
3000 3018 """Shell execute - run a shell command and capture its output.
3001 3019
3002 3020 %sx command
3003 3021
3004 3022 IPython will run the given command using commands.getoutput(), and
3005 3023 return the result formatted as a list (split on '\\n'). Since the
3006 3024 output is _returned_, it will be stored in ipython's regular output
3007 3025 cache Out[N] and in the '_N' automatic variables.
3008 3026
3009 3027 Notes:
3010 3028
3011 3029 1) If an input line begins with '!!', then %sx is automatically
3012 3030 invoked. That is, while:
3013 3031 !ls
3014 3032 causes ipython to simply issue system('ls'), typing
3015 3033 !!ls
3016 3034 is a shorthand equivalent to:
3017 3035 %sx ls
3018 3036
3019 3037 2) %sx differs from %sc in that %sx automatically splits into a list,
3020 3038 like '%sc -l'. The reason for this is to make it as easy as possible
3021 3039 to process line-oriented shell output via further python commands.
3022 3040 %sc is meant to provide much finer control, but requires more
3023 3041 typing.
3024 3042
3025 3043 3) Just like %sc -l, this is a list with special attributes:
3026 3044
3027 3045 .l (or .list) : value as list.
3028 3046 .n (or .nlstr): value as newline-separated string.
3029 3047 .s (or .spstr): value as whitespace-separated string.
3030 3048
3031 3049 This is very useful when trying to use such lists as arguments to
3032 3050 system commands."""
3033 3051
3034 3052 if parameter_s:
3035 3053 out,err = self.shell.getoutputerror(parameter_s)
3036 3054 if err:
3037 3055 print >> Term.cerr,err
3038 3056 return SList(out.split('\n'))
3039 3057
3040 3058 def magic_bg(self, parameter_s=''):
3041 3059 """Run a job in the background, in a separate thread.
3042 3060
3043 3061 For example,
3044 3062
3045 3063 %bg myfunc(x,y,z=1)
3046 3064
3047 3065 will execute 'myfunc(x,y,z=1)' in a background thread. As soon as the
3048 3066 execution starts, a message will be printed indicating the job
3049 3067 number. If your job number is 5, you can use
3050 3068
3051 3069 myvar = jobs.result(5) or myvar = jobs[5].result
3052 3070
3053 3071 to assign this result to variable 'myvar'.
3054 3072
3055 3073 IPython has a job manager, accessible via the 'jobs' object. You can
3056 3074 type jobs? to get more information about it, and use jobs.<TAB> to see
3057 3075 its attributes. All attributes not starting with an underscore are
3058 3076 meant for public use.
3059 3077
3060 3078 In particular, look at the jobs.new() method, which is used to create
3061 3079 new jobs. This magic %bg function is just a convenience wrapper
3062 3080 around jobs.new(), for expression-based jobs. If you want to create a
3063 3081 new job with an explicit function object and arguments, you must call
3064 3082 jobs.new() directly.
3065 3083
3066 3084 The jobs.new docstring also describes in detail several important
3067 3085 caveats associated with a thread-based model for background job
3068 3086 execution. Type jobs.new? for details.
3069 3087
3070 3088 You can check the status of all jobs with jobs.status().
3071 3089
3072 3090 The jobs variable is set by IPython into the Python builtin namespace.
3073 3091 If you ever declare a variable named 'jobs', you will shadow this
3074 3092 name. You can either delete your global jobs variable to regain
3075 3093 access to the job manager, or make a new name and assign it manually
3076 3094 to the manager (stored in IPython's namespace). For example, to
3077 3095 assign the job manager to the Jobs name, use:
3078 3096
3079 3097 Jobs = __builtins__.jobs"""
3080 3098
3081 3099 self.shell.jobs.new(parameter_s,self.shell.user_ns)
3082 3100
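# A minimal thread-based sketch of the expression-job idea described in the
# %bg docstring above. The real manager is IPython's background-jobs module
# (the 'jobs' object); ExprJobSketch only illustrates the pattern, not its API.
import threading

class ExprJobSketch(threading.Thread):
    """Evaluate an expression string in a separate thread, keeping the result."""
    def __init__(self, expr, namespace):
        threading.Thread.__init__(self)
        self.expr = expr
        self.namespace = namespace
        self.result = None
    def run(self):
        # Evaluate in the caller-supplied namespace, as %bg does with user_ns.
        self.result = eval(self.expr, self.namespace)

# job = ExprJobSketch('sum(range(10))', {})
# job.start(); job.join()
# job.result -> 45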
3083 3101 def magic_r(self, parameter_s=''):
3084 3102 """Repeat previous input.
3085 3103
3086 3104 Note: Consider using the more powerful %rep instead!
3087 3105
3088 3106 If given an argument, repeats the previous command which starts with
3089 3107 the same string, otherwise it just repeats the previous input.
3090 3108
3091 3109 Shell escaped commands (with ! as first character) are not recognized
3092 3110 by this system, only pure python code and magic commands.
3093 3111 """
3094 3112
3095 3113 start = parameter_s.strip()
3096 3114 esc_magic = self.shell.ESC_MAGIC
3097 3115 # Identify magic commands even if automagic is on (which means
3098 3116 # the in-memory version is different from that typed by the user).
3099 3117 if self.shell.rc.automagic:
3100 3118 start_magic = esc_magic+start
3101 3119 else:
3102 3120 start_magic = start
3103 3121 # Look through the input history in reverse
3104 3122 for n in range(len(self.shell.input_hist)-2,0,-1):
3105 3123 input = self.shell.input_hist[n]
3106 3124 # skip plain 'r' lines so we don't recurse to infinity
3107 3125 if input != '_ip.magic("r")\n' and \
3108 3126 (input.startswith(start) or input.startswith(start_magic)):
3109 3127 #print 'match',`input` # dbg
3110 3128 print 'Executing:',input,
3111 3129 self.shell.runlines(input)
3112 3130 return
3113 3131 print 'No previous input matching `%s` found.' % start
3114 3132
3115 3133
3116 3134 def magic_bookmark(self, parameter_s=''):
3117 3135 """Manage IPython's bookmark system.
3118 3136
3119 3137 %bookmark <name> - set bookmark to current dir
3120 3138 %bookmark <name> <dir> - set bookmark to <dir>
3121 3139 %bookmark -l - list all bookmarks
3122 3140 %bookmark -d <name> - remove bookmark
3123 3141 %bookmark -r - remove all bookmarks
3124 3142
3125 3143 You can later on access a bookmarked folder with:
3126 3144 %cd -b <name>
3127 3145 or simply '%cd <name>' if there is no directory called <name> AND
3128 3146 there is such a bookmark defined.
3129 3147
3130 3148 Your bookmarks persist through IPython sessions, but they are
3131 3149 associated with each profile."""
3132 3150
3133 3151 opts,args = self.parse_options(parameter_s,'drl',mode='list')
3134 3152 if len(args) > 2:
3135 3153 raise UsageError("%bookmark: too many arguments")
3136 3154
3137 3155 bkms = self.db.get('bookmarks',{})
3138 3156
3139 3157 if opts.has_key('d'):
3140 3158 try:
3141 3159 todel = args[0]
3142 3160 except IndexError:
3143 3161 raise UsageError(
3144 3162 "%bookmark -d: must provide a bookmark to delete")
3145 3163 else:
3146 3164 try:
3147 3165 del bkms[todel]
3148 3166 except KeyError:
3149 3167 raise UsageError(
3150 3168 "%%bookmark -d: Can't delete bookmark '%s'" % todel)
3151 3169
3152 3170 elif opts.has_key('r'):
3153 3171 bkms = {}
3154 3172 elif opts.has_key('l'):
3155 3173 bks = bkms.keys()
3156 3174 bks.sort()
3157 3175 if bks:
3158 3176 size = max(map(len,bks))
3159 3177 else:
3160 3178 size = 0
3161 3179 fmt = '%-'+str(size)+'s -> %s'
3162 3180 print 'Current bookmarks:'
3163 3181 for bk in bks:
3164 3182 print fmt % (bk,bkms[bk])
3165 3183 else:
3166 3184 if not args:
3167 3185 raise UsageError("%bookmark: You must specify the bookmark name")
3168 3186 elif len(args)==1:
3169 3187 bkms[args[0]] = os.getcwd()
3170 3188 elif len(args)==2:
3171 3189 bkms[args[0]] = args[1]
3172 3190 self.db['bookmarks'] = bkms
3173 3191
3174 3192 def magic_pycat(self, parameter_s=''):
3175 3193 """Show a syntax-highlighted file through a pager.
3176 3194
3177 3195 This magic is similar to the cat utility, but it will assume the file
3178 3196 to be Python source and will show it with syntax highlighting. """
3179 3197
3180 3198 try:
3181 3199 filename = get_py_filename(parameter_s)
3182 3200 cont = file_read(filename)
3183 3201 except IOError:
3184 3202 try:
3185 3203 cont = eval(parameter_s,self.user_ns)
3186 3204 except NameError:
3187 3205 cont = None
3188 3206 if cont is None:
3189 3207 print "Error: no such file or variable"
3190 3208 return
3191 3209
3192 3210 page(self.shell.pycolorize(cont),
3193 3211 screen_lines=self.shell.rc.screen_length)
3194 3212
3195 3213 def magic_cpaste(self, parameter_s=''):
3196 3214 """Allows you to paste & execute a pre-formatted code block from clipboard.
3197 3215
3198 3216 You must terminate the block with '--' (two minus-signs) alone on the
3199 3217 line. You can also provide your own sentinel with '%cpaste -s %%' ('%%'
3200 3218 is the new sentinel for this operation)
3201 3219
3202 3220 The block is dedented prior to execution to enable execution of method
3203 3221 definitions. '>' and '+' characters at the beginning of a line are
3204 3222 ignored, to allow pasting directly from e-mails, diff files and
3205 3223 doctests (the '...' continuation prompt is also stripped). The
3206 3224 executed block is also assigned to variable named 'pasted_block' for
3207 3225 later editing with '%edit pasted_block'.
3208 3226
3209 3227 You can also pass a variable name as an argument, e.g. '%cpaste foo'.
3210 3228 This assigns the pasted block to variable 'foo' as a string, without
3211 3229 dedenting or executing it (preceding '>>>' and '+' are still stripped).
3212 3230
3231 '%cpaste -r' re-executes the block previously entered by cpaste.
3232
3213 3233 Do not be alarmed by garbled output on Windows (it's a readline bug).
3214 3234 Just press enter and type -- (and press enter again) and the block
3215 3235 will be what was just pasted.
3216 3236
3217 3237 IPython statements (magics, shell escapes) are not supported (yet).
3218 3238 """
3219 opts,args = self.parse_options(parameter_s,'s:',mode='string')
3239 opts,args = self.parse_options(parameter_s,'rs:',mode='string')
3220 3240 par = args.strip()
3241 if opts.has_key('r'):
3242 b = self.user_ns.get('pasted_block', None)
3243 if b is None:
3244 raise UsageError('No previous pasted block available')
3245 print "Re-executing '%s...' (%d chars)"% (b.split('\n',1)[0], len(b))
3246 exec b in self.user_ns
3247 return
3248
3221 3249 sentinel = opts.get('s','--')
3222 3250
3223 3251 # Regular expressions that declare text we strip from the input:
3224 3252 strip_re = [r'^\s*In \[\d+\]:', # IPython input prompt
3225 3253 r'^\s*(\s?>)+', # Python input prompt
3226 3254 r'^\s*\.{3,}', # Continuation prompts
3227 3255 r'^\++',
3228 3256 ]
3229 3257
3230 3258 strip_from_start = map(re.compile,strip_re)
3231 3259
3232 3260 from IPython import iplib
3233 3261 lines = []
3234 3262 print "Pasting code; enter '%s' alone on the line to stop." % sentinel
3235 3263 while 1:
3236 3264 l = iplib.raw_input_original(':')
3237 3265 if l ==sentinel:
3238 3266 break
3239 3267
3240 3268 for pat in strip_from_start:
3241 3269 l = pat.sub('',l)
3242 3270 lines.append(l)
3243 3271
3244 3272 block = "\n".join(lines) + '\n'
3245 3273 #print "block:\n",block
3246 3274 if not par:
3247 3275 b = textwrap.dedent(block)
3248 exec b in self.user_ns
3249 3276 self.user_ns['pasted_block'] = b
3277 exec b in self.user_ns
3250 3278 else:
3251 3279 self.user_ns[par] = SList(block.splitlines())
3252 3280 print "Block assigned to '%s'" % par
3253 3281
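# Standalone illustration of the prompt-stripping step performed by %cpaste
# above: each pasted line is run through the same strip_re patterns so that
# doctest, e-mail and diff prefixes disappear before the block is executed.
# clean_pasted_line is a hypothetical helper name.
import re

_strip_re = [r'^\s*In \[\d+\]:',  # IPython input prompt
             r'^\s*(\s?>)+',      # Python '>>>' prompt / e-mail quoting
             r'^\s*\.{3,}',       # continuation prompts
             r'^\++',             # leading '+' from diffs
             ]
_strip_pats = [re.compile(pat) for pat in _strip_re]

def clean_pasted_line(line):
    for pat in _strip_pats:
        line = pat.sub('', line)
    return line

# clean_pasted_line('>>> print "hi"') -> ' print "hi"'
# clean_pasted_line('... return x')   -> ' return x'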
3254 3282 def magic_quickref(self,arg):
3255 3283 """ Show a quick reference sheet """
3256 3284 import IPython.usage
3257 3285 qr = IPython.usage.quick_reference + self.magic_magic('-brief')
3258 3286
3259 3287 page(qr)
3260 3288
3261 3289 def magic_upgrade(self,arg):
3262 3290 """ Upgrade your IPython installation
3263 3291
3264 3292 This will copy the config files that don't yet exist in your
3265 3293 ipython dir from the system config dir. Use this after upgrading
3266 3294 IPython if you don't wish to delete your .ipython dir.
3267 3295
3268 3296 Call with -nolegacy to get rid of ipythonrc* files (recommended for
3269 3297 new users)
3270 3298
3271 3299 """
3272 3300 ip = self.getapi()
3273 3301 ipinstallation = path(IPython.__file__).dirname()
3274 3302 upgrade_script = '%s "%s"' % (sys.executable,ipinstallation / 'upgrade_dir.py')
3275 3303 src_config = ipinstallation / 'UserConfig'
3276 3304 userdir = path(ip.options.ipythondir)
3277 3305 cmd = '%s "%s" "%s"' % (upgrade_script, src_config, userdir)
3278 3306 print ">",cmd
3279 3307 shell(cmd)
3280 3308 if arg == '-nolegacy':
3281 3309 legacy = userdir.files('ipythonrc*')
3282 3310 print "Nuking legacy files:",legacy
3283 3311
3284 3312 [p.remove() for p in legacy]
3285 3313 suffix = (sys.platform == 'win32' and '.ini' or '')
3286 3314 (userdir / ('ipythonrc' + suffix)).write_text('# Empty, see ipy_user_conf.py\n')
3287 3315
3288 3316
3289 3317 def magic_doctest_mode(self,parameter_s=''):
3290 3318 """Toggle doctest mode on and off.
3291 3319
3292 3320 This mode allows you to toggle the prompt behavior between normal
3293 3321 IPython prompts and ones that are as similar to the default Python
3294 3322 interpreter as possible.
3295 3323
3296 3324 It also supports the pasting of code snippets that have leading '>>>'
3297 3325 and '...' prompts in them. This means that you can paste doctests from
3298 3326 files or docstrings (even if they have leading whitespace), and the
3299 3327 code will execute correctly. You can then use '%history -tn' to see
3300 3328 the translated history without line numbers; this will give you the
3301 3329 input after removal of all the leading prompts and whitespace, which
3302 3330 can be pasted back into an editor.
3303 3331
3304 3332 With these features, you can switch into this mode easily whenever you
3305 3333 need to do testing and changes to doctests, without having to leave
3306 3334 your existing IPython session.
3307 3335 """
3308 3336
3309 3337 # XXX - Fix this to have cleaner activate/deactivate calls.
3310 3338 from IPython.Extensions import InterpreterPasteInput as ipaste
3311 3339 from IPython.ipstruct import Struct
3312 3340
3313 3341 # Shorthands
3314 3342 shell = self.shell
3315 3343 oc = shell.outputcache
3316 3344 rc = shell.rc
3317 3345 meta = shell.meta
3318 3346 # dstore is a data store kept in the instance metadata bag to track any
3319 3347 # changes we make, so we can undo them later.
3320 3348 dstore = meta.setdefault('doctest_mode',Struct())
3321 3349 save_dstore = dstore.setdefault
3322 3350
3323 3351 # save a few values we'll need to recover later
3324 3352 mode = save_dstore('mode',False)
3325 3353 save_dstore('rc_pprint',rc.pprint)
3326 3354 save_dstore('xmode',shell.InteractiveTB.mode)
3327 3355 save_dstore('rc_separate_out',rc.separate_out)
3328 3356 save_dstore('rc_separate_out2',rc.separate_out2)
3329 3357 save_dstore('rc_prompts_pad_left',rc.prompts_pad_left)
3330 3358 save_dstore('rc_separate_in',rc.separate_in)
3331 3359
3332 3360 if mode == False:
3333 3361 # turn on
3334 3362 ipaste.activate_prefilter()
3335 3363
3336 3364 oc.prompt1.p_template = '>>> '
3337 3365 oc.prompt2.p_template = '... '
3338 3366 oc.prompt_out.p_template = ''
3339 3367
3340 3368 # Prompt separators like plain python
3341 3369 oc.input_sep = oc.prompt1.sep = ''
3342 3370 oc.output_sep = ''
3343 3371 oc.output_sep2 = ''
3344 3372
3345 3373 oc.prompt1.pad_left = oc.prompt2.pad_left = \
3346 3374 oc.prompt_out.pad_left = False
3347 3375
3348 3376 rc.pprint = False
3349 3377
3350 3378 shell.magic_xmode('Plain')
3351 3379
3352 3380 else:
3353 3381 # turn off
3354 3382 ipaste.deactivate_prefilter()
3355 3383
3356 3384 oc.prompt1.p_template = rc.prompt_in1
3357 3385 oc.prompt2.p_template = rc.prompt_in2
3358 3386 oc.prompt_out.p_template = rc.prompt_out
3359 3387
3360 3388 oc.input_sep = oc.prompt1.sep = dstore.rc_separate_in
3361 3389
3362 3390 oc.output_sep = dstore.rc_separate_out
3363 3391 oc.output_sep2 = dstore.rc_separate_out2
3364 3392
3365 3393 oc.prompt1.pad_left = oc.prompt2.pad_left = \
3366 3394 oc.prompt_out.pad_left = dstore.rc_prompts_pad_left
3367 3395
3368 3396 rc.pprint = dstore.rc_pprint
3369 3397
3370 3398 shell.magic_xmode(dstore.xmode)
3371 3399
3372 3400 # Store new mode and inform
3373 3401 dstore.mode = bool(1-int(mode))
3374 3402 print 'Doctest mode is:',
3375 3403 print ['OFF','ON'][dstore.mode]
3376 3404
3377 3405 # end Magic
1 NO CONTENT: modified file chmod 100755 => 100644
@@ -1,99 +1,121
1 1 # -*- coding: utf-8 -*-
2 """Release data for the IPython project.
3
4 $Id: Release.py 3002 2008-02-01 07:17:00Z fperez $"""
2 """Release data for the IPython project."""
5 3
6 4 #*****************************************************************************
7 5 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
8 6 #
9 7 # Copyright (c) 2001 Janko Hauser <jhauser@zscout.de> and Nathaniel Gray
10 8 # <n8gray@caltech.edu>
11 9 #
12 10 # Distributed under the terms of the BSD License. The full license is in
13 11 # the file COPYING, distributed as part of this software.
14 12 #*****************************************************************************
15 13
16 14 # Name of the package for release purposes. This is the name which labels
17 15 # the tarballs and RPMs made by distutils, so it's best to lowercase it.
18 16 name = 'ipython'
19 17
20 18 # For versions with substrings (like 0.6.16.svn), use an extra . to separate
21 19 # the new substring. We have to avoid using either dashes or underscores,
22 20 # because bdist_rpm does not accept dashes (an RPM convention), and
23 21 # bdist_deb does not accept underscores (a Debian convention).
24 22
25 23 development = False # change this to False to do a release
26 version_base = '0.9.beta'
24 version_base = '0.9.1'
27 25 branch = 'ipython'
28 revision = '1099'
26 revision = '1143'
29 27
30 28 if development:
31 29 if branch == 'ipython':
32 30 version = '%s.bzr.r%s' % (version_base, revision)
33 31 else:
34 32 version = '%s.bzr.r%s.%s' % (version_base, revision, branch)
35 33 else:
36 34 version = version_base
37 35
38 36
39 description = "Tools for interactive development in Python."
37 description = "An interactive computing environment for Python"
40 38
41 39 long_description = \
42 40 """
43 IPython provides a replacement for the interactive Python interpreter with
44 extra functionality.
41 The goal of IPython is to create a comprehensive environment for
42 interactive and exploratory computing. To support this goal, IPython
43 has two main components:
44
45 * An enhanced interactive Python shell.
46
47 * An architecture for interactive parallel computing.
48
49 The enhanced interactive Python shell has the following main features:
50
51 * Comprehensive object introspection.
52
53 * Input history, persistent across sessions.
45 54
46 Main features:
55 * Caching of output results during a session with automatically generated
56 references.
47 57
48 * Comprehensive object introspection.
58 * Readline based name completion.
49 59
50 * Input history, persistent across sessions.
60 * Extensible system of 'magic' commands for controlling the environment and
61 performing many tasks related either to IPython or the operating system.
51 62
52 * Caching of output results during a session with automatically generated
53 references.
63 * Configuration system with easy switching between different setups (simpler
64 than changing $PYTHONSTARTUP environment variables every time).
54 65
55 * Readline based name completion.
66 * Session logging and reloading.
56 67
57 * Extensible system of 'magic' commands for controlling the environment and
58 performing many tasks related either to IPython or the operating system.
68 * Extensible syntax processing for special purpose situations.
59 69
60 * Configuration system with easy switching between different setups (simpler
61 than changing $PYTHONSTARTUP environment variables every time).
70 * Access to the system shell with user-extensible alias system.
62 71
63 * Session logging and reloading.
72 * Easily embeddable in other Python programs and wxPython GUIs.
64 73
65 * Extensible syntax processing for special purpose situations.
74 * Integrated access to the pdb debugger and the Python profiler.
66 75
67 * Access to the system shell with user-extensible alias system.
76 The parallel computing architecture has the following main features:
68 77
69 * Easily embeddable in other Python programs.
78 * Quickly parallelize Python code from an interactive Python/IPython session.
70 79
71 * Integrated access to the pdb debugger and the Python profiler.
80 * A flexible and dynamic process model that can be deployed on anything from
81 multicore workstations to supercomputers.
72 82
73 The latest development version is always available at the IPython subversion
74 repository_.
83 * An architecture that supports many different styles of parallelism, from
84 message passing to task farming.
75 85
76 .. _repository: http://ipython.scipy.org/svn/ipython/ipython/trunk#egg=ipython-dev
77 """
86 * Both blocking and fully asynchronous interfaces.
87
88 * High level APIs that enable many things to be parallelized in a few lines
89 of code.
90
91 * Share live parallel jobs with other users securely.
92
93 * Dynamically load balanced task farming system.
94
95 * Robust error handling in parallel code.
96
97 The latest development version is always available from IPython's `Launchpad
98 site <http://launchpad.net/ipython>`_.
99 """
78 100
79 101 license = 'BSD'
80 102
81 103 authors = {'Fernando' : ('Fernando Perez','fperez@colorado.edu'),
82 104 'Janko' : ('Janko Hauser','jhauser@zscout.de'),
83 105 'Nathan' : ('Nathaniel Gray','n8gray@caltech.edu'),
84 106 'Ville' : ('Ville Vainio','vivainio@gmail.com'),
85 107 'Brian' : ('Brian E Granger', 'ellisonbg@gmail.com'),
86 108 'Min' : ('Min Ragan-Kelley', 'benjaminrk@gmail.com')
87 109 }
88 110
89 111 author = 'The IPython Development Team'
90 112
91 113 author_email = 'ipython-dev@scipy.org'
92 114
93 115 url = 'http://ipython.scipy.org'
94 116
95 117 download_url = 'http://ipython.scipy.org/dist'
96 118
97 119 platforms = ['Linux','Mac OSX','Windows XP/2000/NT','Windows 95/98/ME']
98 120
99 121 keywords = ['Interactive','Interpreter','Shell','Parallel','Distributed']
@@ -1,1236 +1,1249
1 1 # -*- coding: utf-8 -*-
2 2 """IPython Shell classes.
3 3
4 4 All the matplotlib support code was co-developed with John Hunter,
5 5 matplotlib's author.
6 6
7 7 $Id: Shell.py 3024 2008-02-07 15:34:42Z darren.dale $"""
8 8
9 9 #*****************************************************************************
10 10 # Copyright (C) 2001-2006 Fernando Perez <fperez@colorado.edu>
11 11 #
12 12 # Distributed under the terms of the BSD License. The full license is in
13 13 # the file COPYING, distributed as part of this software.
14 14 #*****************************************************************************
15 15
16 16 from IPython import Release
17 17 __author__ = '%s <%s>' % Release.authors['Fernando']
18 18 __license__ = Release.license
19 19
20 20 # Code begins
21 21 # Stdlib imports
22 22 import __builtin__
23 23 import __main__
24 24 import Queue
25 25 import inspect
26 26 import os
27 27 import sys
28 28 import thread
29 29 import threading
30 30 import time
31 31
32 32 from signal import signal, SIGINT
33 33
34 34 try:
35 35 import ctypes
36 36 HAS_CTYPES = True
37 37 except ImportError:
38 38 HAS_CTYPES = False
39 39
40 40 # IPython imports
41 41 import IPython
42 42 from IPython import ultraTB, ipapi
43 from IPython.Magic import Magic
43 44 from IPython.genutils import Term,warn,error,flag_calls, ask_yes_no
44 45 from IPython.iplib import InteractiveShell
45 46 from IPython.ipmaker import make_IPython
46 from IPython.Magic import Magic
47 47 from IPython.ipstruct import Struct
48 from IPython.testing import decorators as testdec
48 49
49 50 # Globals
50 51 # global flag to pass around information about Ctrl-C without exceptions
51 52 KBINT = False
52 53
53 54 # global flag to turn on/off Tk support.
54 55 USE_TK = False
55 56
56 57 # ID for the main thread, used for cross-thread exceptions
57 58 MAIN_THREAD_ID = thread.get_ident()
58 59
59 60 # Tag when runcode() is active, for exception handling
60 61 CODE_RUN = None
61 62
62 63 # Default timeout for waiting for multithreaded shells (in seconds)
63 64 GUI_TIMEOUT = 10
64 65
65 66 #-----------------------------------------------------------------------------
66 67 # This class is trivial now, but I want to have it in to publish a clean
67 68 # interface. Later when the internals are reorganized, code that uses this
68 69 # shouldn't have to change.
69 70
70 71 class IPShell:
71 72 """Create an IPython instance."""
72 73
73 74 def __init__(self,argv=None,user_ns=None,user_global_ns=None,
74 75 debug=1,shell_class=InteractiveShell):
75 76 self.IP = make_IPython(argv,user_ns=user_ns,
76 77 user_global_ns=user_global_ns,
77 78 debug=debug,shell_class=shell_class)
78 79
79 80 def mainloop(self,sys_exit=0,banner=None):
80 81 self.IP.mainloop(banner)
81 82 if sys_exit:
82 83 sys.exit()
83 84
84 85 #-----------------------------------------------------------------------------
85 86 def kill_embedded(self,parameter_s=''):
86 87 """%kill_embedded : deactivate for good the current embedded IPython.
87 88
88 89 This function (after asking for confirmation) sets an internal flag so that
89 90 an embedded IPython will never activate again. This is useful to
90 91 permanently disable a shell that is being called inside a loop: once you've
91 92 figured out what you needed from it, you may then kill it and the program
92 93 will then continue to run without the interactive shell interfering again.
93 94 """
94 95
95 96 kill = ask_yes_no("Are you sure you want to kill this embedded instance "
96 97 "(y/n)? [y/N] ",'n')
97 98 if kill:
98 99 self.shell.embedded_active = False
99 100 print "This embedded IPython will not reactivate anymore once you exit."
100 101
101 102 class IPShellEmbed:
102 103 """Allow embedding an IPython shell into a running program.
103 104
104 105 Instances of this class are callable, with the __call__ method being an
105 106 alias to the embed() method of an InteractiveShell instance.
106 107
107 108 Usage (see also the example-embed.py file for a running example):
108 109
109 110 ipshell = IPShellEmbed([argv,banner,exit_msg,rc_override])
110 111
111 112 - argv: list containing valid command-line options for IPython, as they
112 113 would appear in sys.argv[1:].
113 114
114 115 For example, the following command-line options:
115 116
116 117 $ ipython -prompt_in1 'Input <\\#>' -colors LightBG
117 118
118 119 would be passed in the argv list as:
119 120
120 121 ['-prompt_in1','Input <\\#>','-colors','LightBG']
121 122
122 123 - banner: string which gets printed every time the interpreter starts.
123 124
124 125 - exit_msg: string which gets printed every time the interpreter exits.
125 126
126 127 - rc_override: a dict or Struct of configuration options such as those
127 128 used by IPython. These options are read from your ~/.ipython/ipythonrc
128 129 file when the Shell object is created. Passing an explicit rc_override
129 130 dict with any options you want allows you to override those values at
130 131 creation time without having to modify the file. This way you can create
131 132 embeddable instances configured in any way you want without editing any
132 133 global files (thus keeping your interactive IPython configuration
133 134 unchanged).
134 135
135 136 Then the ipshell instance can be called anywhere inside your code:
136 137
137 138 ipshell(header='') -> Opens up an IPython shell.
138 139
139 140 - header: string printed by the IPython shell upon startup. This can let
140 141 you know where in your code you are when dropping into the shell. Note
141 142 that 'banner' gets prepended to all calls, so header is used for
142 143 location-specific information.
143 144
144 145 For more details, see the __call__ method below.
145 146
146 147 When the IPython shell is exited with Ctrl-D, normal program execution
147 148 resumes.
148 149
149 150 This functionality was inspired by a posting on comp.lang.python by cmkl
150 151 <cmkleffner@gmx.de> on Dec. 06/01 concerning similar uses of pyrepl, and
151 152 by the IDL stop/continue commands."""
152 153
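# A minimal usage sketch of the embedding API described in the IPShellEmbed
# docstring above (classic 0.x interface; see example-embed.py for a fuller
# version). The function name compute is purely illustrative.
from IPython.Shell import IPShellEmbed

ipshell = IPShellEmbed(banner='Entering IPython', exit_msg='Leaving IPython')

def compute(x):
    y = x ** 2
    # Drop into an interactive shell here; Ctrl-D resumes normal execution.
    ipshell('Inspect x and y before returning.')
    return y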
153 154 def __init__(self,argv=None,banner='',exit_msg=None,rc_override=None,
154 155 user_ns=None):
155 156 """Note that argv here is a string, NOT a list."""
156 157 self.set_banner(banner)
157 158 self.set_exit_msg(exit_msg)
158 159 self.set_dummy_mode(0)
159 160
160 161 # sys.displayhook is a global, we need to save the user's original
161 162 # Don't rely on __displayhook__, as the user may have changed that.
162 163 self.sys_displayhook_ori = sys.displayhook
163 164
164 165 # save readline completer status
165 166 try:
166 167 #print 'Save completer',sys.ipcompleter # dbg
167 168 self.sys_ipcompleter_ori = sys.ipcompleter
168 169 except:
169 170 pass # not nested with IPython
170 171
171 172 self.IP = make_IPython(argv,rc_override=rc_override,
172 173 embedded=True,
173 174 user_ns=user_ns)
174 175
175 176 ip = ipapi.IPApi(self.IP)
176 177 ip.expose_magic("kill_embedded",kill_embedded)
177 178
178 179 # copy our own displayhook also
179 180 self.sys_displayhook_embed = sys.displayhook
180 181 # and leave the system's display hook clean
181 182 sys.displayhook = self.sys_displayhook_ori
182 183 # don't use the ipython crash handler so that user exceptions aren't
183 184 # trapped
184 185 sys.excepthook = ultraTB.FormattedTB(color_scheme = self.IP.rc.colors,
185 186 mode = self.IP.rc.xmode,
186 187 call_pdb = self.IP.rc.pdb)
187 188 self.restore_system_completer()
188 189
189 190 def restore_system_completer(self):
190 191 """Restores the readline completer which was in place.
191 192
192 193 This allows embedded IPython within IPython not to disrupt the
193 194 parent's completion.
194 195 """
195 196
196 197 try:
197 198 self.IP.readline.set_completer(self.sys_ipcompleter_ori)
198 199 sys.ipcompleter = self.sys_ipcompleter_ori
199 200 except:
200 201 pass
201 202
202 203 def __call__(self,header='',local_ns=None,global_ns=None,dummy=None):
203 204 """Activate the interactive interpreter.
204 205
205 206 __call__(self,header='',local_ns=None,global_ns=None,dummy=None) -> Start
206 207 the interpreter shell with the given local and global namespaces, and
207 208 optionally print a header string at startup.
208 209
209 210 The shell can be globally activated/deactivated using the
210 211 set/get_dummy_mode methods. This allows you to turn off a shell used
211 212 for debugging globally.
212 213
213 214 However, *each* time you call the shell you can override the current
214 215 state of dummy_mode with the optional keyword parameter 'dummy'. For
215 216 example, if you set dummy mode on with IPShell.set_dummy_mode(1), you
216 217 can still have a specific call work by calling it as IPShell(dummy=0).
217 218
218 219 The optional keyword parameter dummy controls whether the call
219 220 actually does anything. """
220 221
221 222 # If the user has turned it off, go away
222 223 if not self.IP.embedded_active:
223 224 return
224 225
225 226 # Normal exits from interactive mode set this flag, so the shell can't
226 227 # re-enter (it checks this variable at the start of interactive mode).
227 228 self.IP.exit_now = False
228 229
229 230 # Allow the dummy parameter to override the global __dummy_mode
230 231 if dummy or (dummy != 0 and self.__dummy_mode):
231 232 return
232 233
233 234 # Set global subsystems (display,completions) to our values
234 235 sys.displayhook = self.sys_displayhook_embed
235 236 if self.IP.has_readline:
236 237 self.IP.set_completer()
237 238
238 239 if self.banner and header:
239 240 format = '%s\n%s\n'
240 241 else:
241 242 format = '%s%s\n'
242 243 banner = format % (self.banner,header)
243 244
244 245 # Call the embedding code with a stack depth of 1 so it can skip over
245 246 # our call and get the original caller's namespaces.
246 247 self.IP.embed_mainloop(banner,local_ns,global_ns,stack_depth=1)
247 248
248 249 if self.exit_msg:
249 250 print self.exit_msg
250 251
251 252 # Restore global systems (display, completion)
252 253 sys.displayhook = self.sys_displayhook_ori
253 254 self.restore_system_completer()
254 255
255 256 def set_dummy_mode(self,dummy):
256 257 """Sets the embeddable shell's dummy mode parameter.
257 258
258 259 set_dummy_mode(dummy): dummy = 0 or 1.
259 260
260 261 This parameter is persistent and makes calls to the embeddable shell
261 262 silently return without performing any action. This allows you to
262 263 globally activate or deactivate a shell you're using with a single call.
263 264
264 265 If you need to override it for a single call, use the 'dummy' keyword of __call__ instead."""
265 266
266 267 if dummy not in [0,1,False,True]:
267 268 raise ValueError,'dummy parameter must be boolean'
268 269 self.__dummy_mode = dummy
269 270
270 271 def get_dummy_mode(self):
271 272 """Return the current value of the dummy mode parameter.
272 273 """
273 274 return self.__dummy_mode
274 275
275 276 def set_banner(self,banner):
276 277 """Sets the global banner.
277 278
278 279 This banner gets prepended to every header printed when the shell
279 280 instance is called."""
280 281
281 282 self.banner = banner
282 283
283 284 def set_exit_msg(self,exit_msg):
284 285 """Sets the global exit_msg.
285 286
286 287 This exit message gets printed upon exiting every time the embedded
287 288 shell is called. It is None by default. """
288 289
289 290 self.exit_msg = exit_msg
290 291
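# --- Illustrative aside (not part of the original file) ----------------------
# A minimal, hedged usage sketch of the IPShellEmbed API documented in the
# class docstring above.  It assumes this module is importable as
# IPython.Shell (the IPython 0.x layout); the function and variable names
# below are invented for illustration.
def _example_embed():
    from IPython.Shell import IPShellEmbed

    ipshell = IPShellEmbed(argv=['-colors', 'LightBG'],
                           banner='Entering embedded IPython',
                           exit_msg='Leaving embedded IPython')

    x = 42
    # Opens an interactive shell that can see this frame's namespace (x);
    # pressing Ctrl-D resumes normal execution of the program.
    ipshell(header='inside _example_embed()')
# ------------------------------------------------------------------------------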
291 292 #-----------------------------------------------------------------------------
292 293 if HAS_CTYPES:
293 294 # Add async exception support. Trick taken from:
294 295 # http://sebulba.wikispaces.com/recipe+thread2
295 296 def _async_raise(tid, exctype):
296 297 """raises the exception, performs cleanup if needed"""
297 298 if not inspect.isclass(exctype):
298 299 raise TypeError("Only types can be raised (not instances)")
299 300 res = ctypes.pythonapi.PyThreadState_SetAsyncExc(tid,
300 301 ctypes.py_object(exctype))
301 302 if res == 0:
302 303 raise ValueError("invalid thread id")
303 304 elif res != 1:
304 305 # """if it returns a number greater than one, you're in trouble,
305 306 # and you should call it again with exc=NULL to revert the effect"""
306 307 ctypes.pythonapi.PyThreadState_SetAsyncExc(tid, 0)
307 308 raise SystemError("PyThreadState_SetAsyncExc failed")
308 309
309 310 def sigint_handler (signum,stack_frame):
310 311 """Sigint handler for threaded apps.
311 312
312 313 This is a horrible hack to pass information about SIGINT _without_
313 314 using exceptions, since I haven't been able to properly manage
314 315 cross-thread exceptions in GTK/WX. In fact, I don't think it can be
315 316 done (or at least that's my understanding from a c.l.py thread where
316 317 this was discussed)."""
317 318
318 319 global KBINT
319 320
320 321 if CODE_RUN:
321 322 _async_raise(MAIN_THREAD_ID,KeyboardInterrupt)
322 323 else:
323 324 KBINT = True
324 325 print '\nKeyboardInterrupt - Press <Enter> to continue.',
325 326 Term.cout.flush()
326 327
327 328 else:
328 329 def sigint_handler (signum,stack_frame):
329 330 """Sigint handler for threaded apps.
330 331
331 332 This is a horrible hack to pass information about SIGINT _without_
332 333 using exceptions, since I haven't been able to properly manage
333 334 cross-thread exceptions in GTK/WX. In fact, I don't think it can be
334 335 done (or at least that's my understanding from a c.l.py thread where
335 336 this was discussed)."""
336 337
337 338 global KBINT
338 339
339 340 print '\nKeyboardInterrupt - Press <Enter> to continue.',
340 341 Term.cout.flush()
341 342 # Set global flag so that runsource can know that Ctrl-C was hit
342 343 KBINT = True
343 344
344 345
345 346 class MTInteractiveShell(InteractiveShell):
346 347 """Simple multi-threaded shell."""
347 348
348 349 # Threading strategy taken from:
349 350 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/65109, by Brian
350 351 # McErlean and John Finlay. Modified with corrections by Antoon Pardon,
351 352 # from the pygtk mailing list, to avoid lockups with system calls.
352 353
353 354 # class attribute to indicate whether the class supports threads or not.
354 355 # Subclasses with thread support should override this as needed.
355 356 isthreaded = True
356 357
357 358 def __init__(self,name,usage=None,rc=Struct(opts=None,args=None),
358 359 user_ns=None,user_global_ns=None,banner2='',
359 360 gui_timeout=GUI_TIMEOUT,**kw):
360 361 """Similar to the normal InteractiveShell, but with threading control"""
361 362
362 363 InteractiveShell.__init__(self,name,usage,rc,user_ns,
363 364 user_global_ns,banner2)
364 365
365 366 # Timeout we wait for GUI thread
366 367 self.gui_timeout = gui_timeout
367 368
368 369 # A queue to hold the code to be executed.
369 370 self.code_queue = Queue.Queue()
370 371
371 372 # Stuff to do at closing time
372 373 self._kill = None
373 374 on_kill = kw.get('on_kill', [])
374 375 # Check that all things to kill are callable:
375 376 for t in on_kill:
376 377 if not callable(t):
377 378 raise TypeError,'on_kill must be a list of callables'
378 379 self.on_kill = on_kill
379 380 # thread identity of the "worker thread" (that may execute code directly)
380 381 self.worker_ident = None
381 382
382 383 def runsource(self, source, filename="<input>", symbol="single"):
383 384 """Compile and run some source in the interpreter.
384 385
385 386 Modified version of code.py's runsource(), to handle threading issues.
386 387 See the original for full docstring details."""
387
388
388 389 global KBINT
389 390
390 391 # If Ctrl-C was typed, we reset the flag and return right away
391 392 if KBINT:
392 393 KBINT = False
393 394 return False
394 395
395 396 if self._kill:
396 397 # can't queue new code if we are being killed
397 398 return True
398 399
399 400 try:
400 401 code = self.compile(source, filename, symbol)
401 402 except (OverflowError, SyntaxError, ValueError):
402 403 # Case 1
403 404 self.showsyntaxerror(filename)
404 405 return False
405 406
406 407 if code is None:
407 408 # Case 2
408 409 return True
409 410
410 411 # shortcut - if we are in worker thread, or the worker thread is not
411 412 # running, execute directly (to allow recursion and prevent deadlock if
412 413 # code is run early in IPython construction)
413 414
414 415 if (self.worker_ident is None
415 416 or self.worker_ident == thread.get_ident() ):
416 417 InteractiveShell.runcode(self,code)
417 return
418 return False
418 419
419 420 # Case 3
420 421 # Store code in queue, so the execution thread can handle it.
421 422
422 423 completed_ev, received_ev = threading.Event(), threading.Event()
423 424
424 425 self.code_queue.put((code,completed_ev, received_ev))
425 426 # first make sure the message was received, with timeout
426 427 received_ev.wait(self.gui_timeout)
427 428 if not received_ev.isSet():
428 429 # the mainloop is dead, start executing code directly
429 430 print "Warning: Timeout for mainloop thread exceeded"
430 431 print "switching to nonthreaded mode (until mainloop wakes up again)"
431 432 self.worker_ident = None
432 433 else:
433 434 completed_ev.wait()
434 435 return False
435 436
436 437 def runcode(self):
437 438 """Execute a code object.
438 439
439 440 Multithreaded wrapper around IPython's runcode()."""
440 441
441 442 global CODE_RUN
442 443
443 444 # we are in worker thread, stash out the id for runsource()
444 445 self.worker_ident = thread.get_ident()
445 446
446 447 if self._kill:
447 448 print >>Term.cout, 'Closing threads...',
448 449 Term.cout.flush()
449 450 for tokill in self.on_kill:
450 451 tokill()
451 452 print >>Term.cout, 'Done.'
452 453 # allow kill() to return
453 454 self._kill.set()
454 455 return True
455 456
456 457 # Install sigint handler. We do it every time to ensure that if user
457 458 # code modifies it, we restore our own handling.
458 459 try:
459 460 signal(SIGINT,sigint_handler)
460 461 except SystemError:
461 462 # This happens under Windows, which seems to have all sorts
462 463 # of problems with signal handling. Oh well...
463 464 pass
464 465
465 466 # Flush queue of pending code by calling the run method of the parent
466 467 # class with all items which may be in the queue.
467 468 code_to_run = None
468 469 while 1:
469 470 try:
470 471 code_to_run, completed_ev, received_ev = self.code_queue.get_nowait()
471 472 except Queue.Empty:
472 473 break
473 474 received_ev.set()
474 475
475 476 # Exceptions need to be raised differently depending on which
476 477 # thread is active. This convoluted try/except is only there to
477 478 # protect against asynchronous exceptions, to ensure that a KBINT
478 479 # at the wrong time doesn't deadlock everything. The global
479 480 # CODE_RUN is set to true/false as close as possible to the
480 481 # runcode() call, so that the KBINT handler is correctly informed.
481 482 try:
482 483 try:
483 484 CODE_RUN = True
484 485 InteractiveShell.runcode(self,code_to_run)
485 486 except KeyboardInterrupt:
486 487 print "Keyboard interrupted in mainloop"
487 488 while not self.code_queue.empty():
488 489 code, ev1,ev2 = self.code_queue.get_nowait()
489 490 ev1.set()
490 491 ev2.set()
491 492 break
492 493 finally:
493 494 CODE_RUN = False
494 495 # allow runsource() to return from its wait
495 496 completed_ev.set()
496 497
497 498
498 499 # This MUST return true for gtk threading to work
499 500 return True
500 501
501 502 def kill(self):
502 503 """Kill the thread, returning when it has been shut down."""
503 504 self._kill = threading.Event()
504 505 self._kill.wait()
505 506
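# --- Illustrative aside (not part of the original file) ----------------------
# A stripped-down, hedged model of the handshake used by runsource()/runcode()
# above: the reading thread queues (code, completed_ev, received_ev), waits for
# the GUI thread to acknowledge receipt, then blocks until execution finishes.
# All names below are invented for illustration.
import Queue
import threading

def _queue_code(work_queue, src, gui_timeout=10):
    completed_ev, received_ev = threading.Event(), threading.Event()
    work_queue.put((src, completed_ev, received_ev))
    received_ev.wait(gui_timeout)        # did the GUI thread pick it up?
    if not received_ev.isSet():
        print "GUI thread unresponsive; a caller could execute directly instead"
    else:
        completed_ev.wait()              # block until the code has been run

def _gui_idle_callback(work_queue):
    # Called periodically from the GUI mainloop (timer/idle handler).
    try:
        src, completed_ev, received_ev = work_queue.get_nowait()
    except Queue.Empty:
        return True
    received_ev.set()                    # acknowledge receipt first
    exec src in {}                       # run the queued source
    completed_ev.set()                   # unblock the reading thread
    return True
# ------------------------------------------------------------------------------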
506 507 class MatplotlibShellBase:
507 508 """Mixin class to provide the necessary modifications to regular IPython
508 509 shell classes for matplotlib support.
509 510
510 511 Given Python's MRO, this should be used as the FIRST class in the
511 512 inheritance hierarchy, so that it overrides the relevant methods."""
512 513
513 514 def _matplotlib_config(self,name,user_ns,user_global_ns=None):
514 515 """Return items needed to setup the user's shell with matplotlib"""
515 516
516 517 # Initialize matplotlib to interactive mode always
517 518 import matplotlib
518 519 from matplotlib import backends
519 520 matplotlib.interactive(True)
520 521
521 522 def use(arg):
522 523 """IPython wrapper for matplotlib's backend switcher.
523 524
524 525 In interactive use, we can not allow switching to a different
525 526 interactive backend, since thread conflicts will most likely crash
526 527 the python interpreter. This routine does a safety check first,
527 528 and refuses to perform a dangerous switch. It still allows
528 529 switching to non-interactive backends."""
529 530
530 531 if arg in backends.interactive_bk and arg != self.mpl_backend:
531 532 m=('invalid matplotlib backend switch.\n'
532 533 'This script attempted to switch to the interactive '
533 534 'backend: `%s`\n'
534 535 'Your current choice of interactive backend is: `%s`\n\n'
535 536 'Switching interactive matplotlib backends at runtime\n'
536 537 'would crash the python interpreter, '
537 538 'and IPython has blocked it.\n\n'
538 539 'You need to either change your choice of matplotlib backend\n'
539 540 'by editing your .matplotlibrc file, or run this script as a \n'
540 541 'standalone file from the command line, not using IPython.\n' %
541 542 (arg,self.mpl_backend) )
542 543 raise RuntimeError, m
543 544 else:
544 545 self.mpl_use(arg)
545 546 self.mpl_use._called = True
546 547
547 548 self.matplotlib = matplotlib
548 549 self.mpl_backend = matplotlib.rcParams['backend']
549 550
550 551 # we also need to block switching of interactive backends by use()
551 552 self.mpl_use = matplotlib.use
552 553 self.mpl_use._called = False
553 554 # overwrite the original matplotlib.use with our wrapper
554 555 matplotlib.use = use
555 556
556 557 # This must be imported last in the matplotlib series, after
557 558 # backend/interactivity choices have been made
558 559 import matplotlib.pylab as pylab
559 560 self.pylab = pylab
560 561
561 562 self.pylab.show._needmain = False
562 563 # We need to detect at runtime whether show() is called by the user.
563 564 # For this, we wrap it into a decorator which adds a 'called' flag.
564 565 self.pylab.draw_if_interactive = flag_calls(self.pylab.draw_if_interactive)
565 566
566 567 # Build a user namespace initialized with matplotlib/matlab features.
567 568 user_ns, user_global_ns = IPython.ipapi.make_user_namespaces(user_ns,
568 569 user_global_ns)
569 570
570 571 # 'import numpy as np' and 'import matplotlib.pyplot as plt' are conventions
571 572 # we're trying to standardize on. Making them available to users by default
572 573 # will greatly help this.
573 574 exec ("import numpy\n"
574 575 "import numpy as np\n"
575 576 "import matplotlib\n"
576 577 "import matplotlib.pylab as pylab\n"
577 578 "try:\n"
578 579 " import matplotlib.pyplot as plt\n"
579 580 "except ImportError:\n"
580 581 " pass\n"
581 582 ) in user_ns
582 583
583 584 # Build matplotlib info banner
584 585 b="""
585 586 Welcome to pylab, a matplotlib-based Python environment.
586 587 For more information, type 'help(pylab)'.
587 588 """
588 589 return user_ns,user_global_ns,b
589 590
590 591 def mplot_exec(self,fname,*where,**kw):
591 592 """Execute a matplotlib script.
592 593
593 594 This is a call to execfile(), but wrapped in safeties to properly
594 595 handle interactive rendering and backend switching."""
595 596
596 597 #print '*** Matplotlib runner ***' # dbg
597 598 # turn off rendering until end of script
598 599 isInteractive = self.matplotlib.rcParams['interactive']
599 600 self.matplotlib.interactive(False)
600 601 self.safe_execfile(fname,*where,**kw)
601 602 self.matplotlib.interactive(isInteractive)
602 603 # make rendering call now, if the user tried to do it
603 604 if self.pylab.draw_if_interactive.called:
604 605 self.pylab.draw()
605 606 self.pylab.draw_if_interactive.called = False
606 607
607 608 # if a backend switch was performed, reverse it now
608 609 if self.mpl_use._called:
609 610 self.matplotlib.rcParams['backend'] = self.mpl_backend
610
611
612 @testdec.skip_doctest
611 613 def magic_run(self,parameter_s=''):
612 614 Magic.magic_run(self,parameter_s,runner=self.mplot_exec)
613 615
614 616 # Fix the docstring so users see the original as well
615 617 magic_run.__doc__ = "%s\n%s" % (Magic.magic_run.__doc__,
616 618 "\n *** Modified %run for Matplotlib,"
617 619 " with proper interactive handling ***")
618 620
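# --- Illustrative aside (not part of the original file) ----------------------
# A hedged sketch of the "called flag" wrapping applied above to
# pylab.draw_if_interactive.  IPython's real helper is flag_calls() from
# IPython.genutils; this stand-in only illustrates the idea.
def _flag_calls_sketch(func):
    """Wrap func so that wrapper.called records whether it was invoked."""
    def wrapper(*args, **kw):
        wrapper.called = True
        return func(*args, **kw)
    wrapper.called = False
    wrapper.__doc__ = func.__doc__
    return wrapper
# Usage sketch: wrap draw_if_interactive, run the user's script, then check
# the .called attribute to decide whether a real draw() is needed.
# ------------------------------------------------------------------------------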
619 621 # Now we provide 2 versions of a matplotlib-aware IPython base shells, single
620 622 # and multithreaded. Note that these are meant for internal use, the IPShell*
621 623 # classes below are the ones meant for public consumption.
622 624
623 625 class MatplotlibShell(MatplotlibShellBase,InteractiveShell):
624 626 """Single-threaded shell with matplotlib support."""
625 627
626 628 def __init__(self,name,usage=None,rc=Struct(opts=None,args=None),
627 629 user_ns=None,user_global_ns=None,**kw):
628 630 user_ns,user_global_ns,b2 = self._matplotlib_config(name,user_ns,user_global_ns)
629 631 InteractiveShell.__init__(self,name,usage,rc,user_ns,user_global_ns,
630 632 banner2=b2,**kw)
631 633
632 634 class MatplotlibMTShell(MatplotlibShellBase,MTInteractiveShell):
633 635 """Multi-threaded shell with matplotlib support."""
634 636
635 637 def __init__(self,name,usage=None,rc=Struct(opts=None,args=None),
636 638 user_ns=None,user_global_ns=None, **kw):
637 639 user_ns,user_global_ns,b2 = self._matplotlib_config(name,user_ns,user_global_ns)
638 640 MTInteractiveShell.__init__(self,name,usage,rc,user_ns,user_global_ns,
639 641 banner2=b2,**kw)
640 642
641 643 #-----------------------------------------------------------------------------
642 644 # Utility functions for the different GUI enabled IPShell* classes.
643 645
644 646 def get_tk():
645 647 """Tries to import Tkinter and returns a withdrawn Tkinter root
646 648 window. If Tkinter is already imported or not available, this
647 649 returns None. This function calls `hijack_tk` underneath.
648 650 """
649 651 if not USE_TK or sys.modules.has_key('Tkinter'):
650 652 return None
651 653 else:
652 654 try:
653 655 import Tkinter
654 656 except ImportError:
655 657 return None
656 658 else:
657 659 hijack_tk()
658 660 r = Tkinter.Tk()
659 661 r.withdraw()
660 662 return r
661 663
662 664 def hijack_tk():
663 665 """Modifies Tkinter's mainloop with a dummy so when a module calls
664 666 mainloop, it does not block.
665 667
666 668 """
667 669 def misc_mainloop(self, n=0):
668 670 pass
669 671 def tkinter_mainloop(n=0):
670 672 pass
671 673
672 674 import Tkinter
673 675 Tkinter.Misc.mainloop = misc_mainloop
674 676 Tkinter.mainloop = tkinter_mainloop
675 677
676 678 def update_tk(tk):
677 679 """Updates the Tkinter event loop. This is typically called from
678 680 the respective WX or GTK mainloops.
679 681 """
680 682 if tk:
681 683 tk.update()
682 684
683 685 def hijack_wx():
684 686 """Modifies wxPython's MainLoop with a dummy so user code does not
685 687 block IPython. The hijacked mainloop function is returned.
686 688 """
687 689 def dummy_mainloop(*args, **kw):
688 690 pass
689 691
690 692 try:
691 693 import wx
692 694 except ImportError:
693 695 # For very old versions of WX
694 696 import wxPython as wx
695 697
696 698 ver = wx.__version__
697 699 orig_mainloop = None
698 700 if ver[:3] >= '2.5':
699 701 import wx
700 702 if hasattr(wx, '_core_'): core = getattr(wx, '_core_')
701 703 elif hasattr(wx, '_core'): core = getattr(wx, '_core')
702 704 else: raise AttributeError('Could not find wx core module')
703 705 orig_mainloop = core.PyApp_MainLoop
704 706 core.PyApp_MainLoop = dummy_mainloop
705 707 elif ver[:3] == '2.4':
706 708 orig_mainloop = wx.wxc.wxPyApp_MainLoop
707 709 wx.wxc.wxPyApp_MainLoop = dummy_mainloop
708 710 else:
709 711 warn("Unable to find either wxPython version 2.4 or >= 2.5.")
710 712 return orig_mainloop
711 713
712 714 def hijack_gtk():
713 715 """Modifies pyGTK's mainloop with a dummy so user code does not
714 716 block IPython. This function returns the original `gtk.mainloop`
715 717 function that has been hijacked.
716 718 """
717 719 def dummy_mainloop(*args, **kw):
718 720 pass
719 721 import gtk
720 722 if gtk.pygtk_version >= (2,4,0): orig_mainloop = gtk.main
721 723 else: orig_mainloop = gtk.mainloop
722 724 gtk.mainloop = dummy_mainloop
723 725 gtk.main = dummy_mainloop
724 726 return orig_mainloop
725 727
726 728 def hijack_qt():
727 729 """Modifies PyQt's mainloop with a dummy so user code does not
728 730 block IPython. This function returns the original
729 731 `qt.qApp.exec_loop` function that has been hijacked.
730 732 """
731 733 def dummy_mainloop(*args, **kw):
732 734 pass
733 735 import qt
734 736 orig_mainloop = qt.qApp.exec_loop
735 737 qt.qApp.exec_loop = dummy_mainloop
736 738 qt.QApplication.exec_loop = dummy_mainloop
737 739 return orig_mainloop
738 740
739 741 def hijack_qt4():
740 742 """Modifies PyQt4's mainloop with a dummy so user code does not
741 743 block IPython. This function returns the original
742 744 `QtGui.qApp.exec_` function that has been hijacked.
743 745 """
744 746 def dummy_mainloop(*args, **kw):
745 747 pass
746 748 from PyQt4 import QtGui, QtCore
747 749 orig_mainloop = QtGui.qApp.exec_
748 750 QtGui.qApp.exec_ = dummy_mainloop
749 751 QtGui.QApplication.exec_ = dummy_mainloop
750 752 QtCore.QCoreApplication.exec_ = dummy_mainloop
751 753 return orig_mainloop
752 754
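# --- Illustrative aside (not part of the original file) ----------------------
# All of the hijack_* helpers above follow the same monkey-patching pattern:
# save the toolkit's real mainloop, replace it with a no-op so user code that
# calls it does not block IPython, and return the original so IPython's own
# thread can drive it.  A hedged, toolkit-agnostic sketch:
def _hijack_mainloop(module, attr):
    """Replace module.attr with a no-op and return the original callable."""
    def dummy_mainloop(*args, **kw):
        pass
    orig_mainloop = getattr(module, attr)
    setattr(module, attr, dummy_mainloop)
    return orig_mainloop
# e.g. (assuming gtk is importable):  real_main = _hijack_mainloop(gtk, 'main')
# ------------------------------------------------------------------------------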
753 755 #-----------------------------------------------------------------------------
754 756 # The IPShell* classes below are the ones meant to be run by external code as
755 757 # IPython instances. Note that unless a specific threading strategy is
756 758 # desired, the factory function start() below should be used instead (it
757 759 # selects the proper threaded class).
758 760
759 761 class IPThread(threading.Thread):
760 762 def run(self):
761 763 self.IP.mainloop(self._banner)
762 764 self.IP.kill()
763 765
764 766 class IPShellGTK(IPThread):
765 767 """Run a gtk mainloop() in a separate thread.
766 768
767 769 Python commands can be passed to the thread where they will be executed.
768 770 This is implemented by periodically checking for passed code using a
769 771 GTK timeout callback."""
770 772
771 773 TIMEOUT = 100 # Millisecond interval between timeouts.
772 774
773 775 def __init__(self,argv=None,user_ns=None,user_global_ns=None,
774 776 debug=1,shell_class=MTInteractiveShell):
775 777
776 778 import gtk
779 # Check for set_interactive, coming up in new pygtk.
780 # Disable it so that this code works, but notify
781 # the user that he has a better option as well.
782 # XXX TODO better support when set_interactive is released
783 try:
784 gtk.set_interactive(False)
785 print "Your PyGtk has set_interactive(), so you can use the"
786 print "more stable single-threaded Gtk mode."
787 print "See https://bugs.launchpad.net/ipython/+bug/270856"
788 except AttributeError:
789 pass
777 790
778 791 self.gtk = gtk
779 792 self.gtk_mainloop = hijack_gtk()
780 793
781 794 # Allows us to use both Tk and GTK.
782 795 self.tk = get_tk()
783 796
784 797 if gtk.pygtk_version >= (2,4,0): mainquit = self.gtk.main_quit
785 798 else: mainquit = self.gtk.mainquit
786 799
787 800 self.IP = make_IPython(argv,user_ns=user_ns,
788 801 user_global_ns=user_global_ns,
789 802 debug=debug,
790 803 shell_class=shell_class,
791 804 on_kill=[mainquit])
792 805
793 806 # HACK: slot for banner in self; it will be passed to the mainloop
794 807 # method only and .run() needs it. The actual value will be set by
795 808 # .mainloop().
796 809 self._banner = None
797 810
798 811 threading.Thread.__init__(self)
799 812
800 813 def mainloop(self,sys_exit=0,banner=None):
801 814
802 815 self._banner = banner
803 816
804 817 if self.gtk.pygtk_version >= (2,4,0):
805 818 import gobject
806 819 gobject.idle_add(self.on_timer)
807 820 else:
808 821 self.gtk.idle_add(self.on_timer)
809 822
810 823 if sys.platform != 'win32':
811 824 try:
812 825 if self.gtk.gtk_version[0] >= 2:
813 826 self.gtk.gdk.threads_init()
814 827 except AttributeError:
815 828 pass
816 829 except RuntimeError:
817 830 error('Your pyGTK likely has not been compiled with '
818 831 'threading support.\n'
819 832 'The exception printout is below.\n'
820 833 'You can either rebuild pyGTK with threads, or '
821 834 'try using \n'
822 835 'matplotlib with a different backend (like Tk or WX).\n'
823 836 'Note that matplotlib will most likely not work in its '
824 837 'current state!')
825 838 self.IP.InteractiveTB()
826 839
827 840 self.start()
828 841 self.gtk.gdk.threads_enter()
829 842 self.gtk_mainloop()
830 843 self.gtk.gdk.threads_leave()
831 844 self.join()
832 845
833 846 def on_timer(self):
834 847 """Called when GTK is idle.
835 848
836 849 Must return True always, otherwise GTK stops calling it"""
837 850
838 851 update_tk(self.tk)
839 852 self.IP.runcode()
840 853 time.sleep(0.01)
841 854 return True
842 855
843 856
844 857 class IPShellWX(IPThread):
845 858 """Run a wx mainloop() in a separate thread.
846 859
847 860 Python commands can be passed to the thread where they will be executed.
848 861 This is implemented by periodically checking for passed code using a
849 862 wx timer callback."""
850 863
851 864 TIMEOUT = 100 # Millisecond interval between timeouts.
852 865
853 866 def __init__(self,argv=None,user_ns=None,user_global_ns=None,
854 867 debug=1,shell_class=MTInteractiveShell):
855 868
856 869 self.IP = make_IPython(argv,user_ns=user_ns,
857 870 user_global_ns=user_global_ns,
858 871 debug=debug,
859 872 shell_class=shell_class,
860 873 on_kill=[self.wxexit])
861 874
862 875 wantedwxversion=self.IP.rc.wxversion
863 876 if wantedwxversion!="0":
864 877 try:
865 878 import wxversion
866 879 except ImportError:
867 880 error('The wxversion module is needed for WX version selection')
868 881 else:
869 882 try:
870 883 wxversion.select(wantedwxversion)
871 884 except:
872 885 self.IP.InteractiveTB()
873 886 error('Requested wxPython version %s could not be loaded' %
874 887 wantedwxversion)
875 888
876 889 import wx
877 890
878 891 threading.Thread.__init__(self)
879 892 self.wx = wx
880 893 self.wx_mainloop = hijack_wx()
881 894
882 895 # Allows us to use both Tk and GTK.
883 896 self.tk = get_tk()
884 897
885 898 # HACK: slot for banner in self; it will be passed to the mainloop
886 899 # method only and .run() needs it. The actual value will be set by
887 900 # .mainloop().
888 901 self._banner = None
889 902
890 903 self.app = None
891 904
892 905 def wxexit(self, *args):
893 906 if self.app is not None:
894 907 self.app.agent.timer.Stop()
895 908 self.app.ExitMainLoop()
896 909
897 910 def mainloop(self,sys_exit=0,banner=None):
898 911
899 912 self._banner = banner
900 913
901 914 self.start()
902 915
903 916 class TimerAgent(self.wx.MiniFrame):
904 917 wx = self.wx
905 918 IP = self.IP
906 919 tk = self.tk
907 920 def __init__(self, parent, interval):
908 921 style = self.wx.DEFAULT_FRAME_STYLE | self.wx.TINY_CAPTION_HORIZ
909 922 self.wx.MiniFrame.__init__(self, parent, -1, ' ', pos=(200, 200),
910 923 size=(100, 100),style=style)
911 924 self.Show(False)
912 925 self.interval = interval
913 926 self.timerId = self.wx.NewId()
914 927
915 928 def StartWork(self):
916 929 self.timer = self.wx.Timer(self, self.timerId)
917 930 self.wx.EVT_TIMER(self, self.timerId, self.OnTimer)
918 931 self.timer.Start(self.interval)
919 932
920 933 def OnTimer(self, event):
921 934 update_tk(self.tk)
922 935 self.IP.runcode()
923 936
924 937 class App(self.wx.App):
925 938 wx = self.wx
926 939 TIMEOUT = self.TIMEOUT
927 940 def OnInit(self):
928 941 'Create the main window and insert the custom frame'
929 942 self.agent = TimerAgent(None, self.TIMEOUT)
930 943 self.agent.Show(False)
931 944 self.agent.StartWork()
932 945 return True
933 946
934 947 self.app = App(redirect=False)
935 948 self.wx_mainloop(self.app)
936 949 self.join()
937 950
938 951
939 952 class IPShellQt(IPThread):
940 953 """Run a Qt event loop in a separate thread.
941 954
942 955 Python commands can be passed to the thread where they will be executed.
943 956 This is implemented by periodically checking for passed code using a
944 957 Qt timer / slot."""
945 958
946 959 TIMEOUT = 100 # Millisecond interval between timeouts.
947 960
948 961 def __init__(self, argv=None, user_ns=None, user_global_ns=None,
949 962 debug=0, shell_class=MTInteractiveShell):
950 963
951 964 import qt
952 965
953 966 self.exec_loop = hijack_qt()
954 967
955 968 # Allows us to use both Tk and QT.
956 969 self.tk = get_tk()
957 970
958 971 self.IP = make_IPython(argv,
959 972 user_ns=user_ns,
960 973 user_global_ns=user_global_ns,
961 974 debug=debug,
962 975 shell_class=shell_class,
963 976 on_kill=[qt.qApp.exit])
964 977
965 978 # HACK: slot for banner in self; it will be passed to the mainloop
966 979 # method only and .run() needs it. The actual value will be set by
967 980 # .mainloop().
968 981 self._banner = None
969 982
970 983 threading.Thread.__init__(self)
971 984
972 985 def mainloop(self, sys_exit=0, banner=None):
973 986
974 987 import qt
975 988
976 989 self._banner = banner
977 990
978 991 if qt.QApplication.startingUp():
979 992 a = qt.QApplication(sys.argv)
980 993
981 994 self.timer = qt.QTimer()
982 995 qt.QObject.connect(self.timer,
983 996 qt.SIGNAL('timeout()'),
984 997 self.on_timer)
985 998
986 999 self.start()
987 1000 self.timer.start(self.TIMEOUT, True)
988 1001 while True:
989 1002 if self.IP._kill: break
990 1003 self.exec_loop()
991 1004 self.join()
992 1005
993 1006 def on_timer(self):
994 1007 update_tk(self.tk)
995 1008 result = self.IP.runcode()
996 1009 self.timer.start(self.TIMEOUT, True)
997 1010 return result
998 1011
999 1012
1000 1013 class IPShellQt4(IPThread):
1001 1014 """Run a Qt event loop in a separate thread.
1002 1015
1003 1016 Python commands can be passed to the thread where they will be executed.
1004 1017 This is implemented by periodically checking for passed code using a
1005 1018 Qt timer / slot."""
1006 1019
1007 1020 TIMEOUT = 100 # Millisecond interval between timeouts.
1008 1021
1009 1022 def __init__(self, argv=None, user_ns=None, user_global_ns=None,
1010 1023 debug=0, shell_class=MTInteractiveShell):
1011 1024
1012 1025 from PyQt4 import QtCore, QtGui
1013 1026
1014 1027 try:
1015 1028 # present in PyQt4-4.2.1 or later
1016 1029 QtCore.pyqtRemoveInputHook()
1017 1030 except AttributeError:
1018 1031 pass
1019 1032
1020 1033 if QtCore.PYQT_VERSION_STR == '4.3':
1021 1034 warn('''PyQt4 version 4.3 detected.
1022 1035 If you experience repeated threading warnings, please update PyQt4.
1023 1036 ''')
1024 1037
1025 1038 self.exec_ = hijack_qt4()
1026 1039
1027 1040 # Allows us to use both Tk and QT.
1028 1041 self.tk = get_tk()
1029 1042
1030 1043 self.IP = make_IPython(argv,
1031 1044 user_ns=user_ns,
1032 1045 user_global_ns=user_global_ns,
1033 1046 debug=debug,
1034 1047 shell_class=shell_class,
1035 1048 on_kill=[QtGui.qApp.exit])
1036 1049
1037 1050 # HACK: slot for banner in self; it will be passed to the mainloop
1038 1051 # method only and .run() needs it. The actual value will be set by
1039 1052 # .mainloop().
1040 1053 self._banner = None
1041 1054
1042 1055 threading.Thread.__init__(self)
1043 1056
1044 1057 def mainloop(self, sys_exit=0, banner=None):
1045 1058
1046 1059 from PyQt4 import QtCore, QtGui
1047 1060
1048 1061 self._banner = banner
1049 1062
1050 1063 if QtGui.QApplication.startingUp():
1051 1064 a = QtGui.QApplication(sys.argv)
1052 1065
1053 1066 self.timer = QtCore.QTimer()
1054 1067 QtCore.QObject.connect(self.timer,
1055 1068 QtCore.SIGNAL('timeout()'),
1056 1069 self.on_timer)
1057 1070
1058 1071 self.start()
1059 1072 self.timer.start(self.TIMEOUT)
1060 1073 while True:
1061 1074 if self.IP._kill: break
1062 1075 self.exec_()
1063 1076 self.join()
1064 1077
1065 1078 def on_timer(self):
1066 1079 update_tk(self.tk)
1067 1080 result = self.IP.runcode()
1068 1081 self.timer.start(self.TIMEOUT)
1069 1082 return result
1070 1083
1071 1084
1072 1085 # A set of matplotlib public IPython shell classes, for single-threaded (Tk*
1073 1086 # and FLTK*) and multithreaded (GTK*, WX* and Qt*) backends to use.
1074 1087 def _load_pylab(user_ns):
1075 1088 """Allow users to disable pulling all of pylab into the top-level
1076 1089 namespace.
1077 1090
1078 1091 This little utility must be called AFTER the actual ipython instance is
1079 1092 running, since only then will the options file have been fully parsed."""
1080 1093
1081 1094 ip = IPython.ipapi.get()
1082 1095 if ip.options.pylab_import_all:
1083 1096 ip.ex("from matplotlib.pylab import *")
1084 1097 ip.IP.user_config_ns.update(ip.user_ns)
1085 1098
1086 1099
1087 1100 class IPShellMatplotlib(IPShell):
1088 1101 """Subclass IPShell with MatplotlibShell as the internal shell.
1089 1102
1090 1103 Single-threaded class, meant for the Tk* and FLTK* backends.
1091 1104
1092 1105 Having this on a separate class simplifies the external driver code."""
1093 1106
1094 1107 def __init__(self,argv=None,user_ns=None,user_global_ns=None,debug=1):
1095 1108 IPShell.__init__(self,argv,user_ns,user_global_ns,debug,
1096 1109 shell_class=MatplotlibShell)
1097 1110 _load_pylab(self.IP.user_ns)
1098 1111
1099 1112 class IPShellMatplotlibGTK(IPShellGTK):
1100 1113 """Subclass IPShellGTK with MatplotlibMTShell as the internal shell.
1101 1114
1102 1115 Multi-threaded class, meant for the GTK* backends."""
1103 1116
1104 1117 def __init__(self,argv=None,user_ns=None,user_global_ns=None,debug=1):
1105 1118 IPShellGTK.__init__(self,argv,user_ns,user_global_ns,debug,
1106 1119 shell_class=MatplotlibMTShell)
1107 1120 _load_pylab(self.IP.user_ns)
1108 1121
1109 1122 class IPShellMatplotlibWX(IPShellWX):
1110 1123 """Subclass IPShellWX with MatplotlibMTShell as the internal shell.
1111 1124
1112 1125 Multi-threaded class, meant for the WX* backends."""
1113 1126
1114 1127 def __init__(self,argv=None,user_ns=None,user_global_ns=None,debug=1):
1115 1128 IPShellWX.__init__(self,argv,user_ns,user_global_ns,debug,
1116 1129 shell_class=MatplotlibMTShell)
1117 1130 _load_pylab(self.IP.user_ns)
1118 1131
1119 1132 class IPShellMatplotlibQt(IPShellQt):
1120 1133 """Subclass IPShellQt with MatplotlibMTShell as the internal shell.
1121 1134
1122 1135 Multi-threaded class, meant for the Qt* backends."""
1123 1136
1124 1137 def __init__(self,argv=None,user_ns=None,user_global_ns=None,debug=1):
1125 1138 IPShellQt.__init__(self,argv,user_ns,user_global_ns,debug,
1126 1139 shell_class=MatplotlibMTShell)
1127 1140 _load_pylab(self.IP.user_ns)
1128 1141
1129 1142 class IPShellMatplotlibQt4(IPShellQt4):
1130 1143 """Subclass IPShellQt4 with MatplotlibMTShell as the internal shell.
1131 1144
1132 1145 Multi-threaded class, meant for the Qt4* backends."""
1133 1146
1134 1147 def __init__(self,argv=None,user_ns=None,user_global_ns=None,debug=1):
1135 1148 IPShellQt4.__init__(self,argv,user_ns,user_global_ns,debug,
1136 1149 shell_class=MatplotlibMTShell)
1137 1150 _load_pylab(self.IP.user_ns)
1138 1151
1139 1152 #-----------------------------------------------------------------------------
1140 1153 # Factory functions to actually start the proper thread-aware shell
1141 1154
1142 1155 def _select_shell(argv):
1143 1156 """Select a shell from the given argv vector.
1144 1157
1145 1158 This function implements the threading selection policy, allowing runtime
1146 1159 control of the threading mode, both for general users and for matplotlib.
1147 1160
1148 1161 Return:
1149 1162 Shell class to be instantiated for runtime operation.
1150 1163 """
1151 1164
1152 1165 global USE_TK
1153 1166
1154 1167 mpl_shell = {'gthread' : IPShellMatplotlibGTK,
1155 1168 'wthread' : IPShellMatplotlibWX,
1156 1169 'qthread' : IPShellMatplotlibQt,
1157 1170 'q4thread' : IPShellMatplotlibQt4,
1158 1171 'tkthread' : IPShellMatplotlib, # Tk is built-in
1159 1172 }
1160 1173
1161 1174 th_shell = {'gthread' : IPShellGTK,
1162 1175 'wthread' : IPShellWX,
1163 1176 'qthread' : IPShellQt,
1164 1177 'q4thread' : IPShellQt4,
1165 1178 'tkthread' : IPShell, # Tk is built-in
1166 1179 }
1167 1180
1168 1181 backends = {'gthread' : 'GTKAgg',
1169 1182 'wthread' : 'WXAgg',
1170 1183 'qthread' : 'QtAgg',
1171 1184 'q4thread' :'Qt4Agg',
1172 1185 'tkthread' :'TkAgg',
1173 1186 }
1174 1187
1175 1188 all_opts = set(['tk','pylab','gthread','qthread','q4thread','wthread',
1176 1189 'tkthread'])
1177 1190 user_opts = set([s.replace('-','') for s in argv[:3]])
1178 1191 special_opts = user_opts & all_opts
1179 1192
1180 1193 if 'tk' in special_opts:
1181 1194 USE_TK = True
1182 1195 special_opts.remove('tk')
1183 1196
1184 1197 if 'pylab' in special_opts:
1185 1198
1186 1199 try:
1187 1200 import matplotlib
1188 1201 except ImportError:
1189 1202 error('matplotlib could NOT be imported! Starting normal IPython.')
1190 1203 return IPShell
1191 1204
1192 1205 special_opts.remove('pylab')
1193 1206 # If there's any option left, it means the user wants to force the
1194 1207 # threading backend, else it's auto-selected from the rc file
1195 1208 if special_opts:
1196 1209 th_mode = special_opts.pop()
1197 1210 matplotlib.rcParams['backend'] = backends[th_mode]
1198 1211 else:
1199 1212 backend = matplotlib.rcParams['backend']
1200 1213 if backend.startswith('GTK'):
1201 1214 th_mode = 'gthread'
1202 1215 elif backend.startswith('WX'):
1203 1216 th_mode = 'wthread'
1204 1217 elif backend.startswith('Qt4'):
1205 1218 th_mode = 'q4thread'
1206 1219 elif backend.startswith('Qt'):
1207 1220 th_mode = 'qthread'
1208 1221 else:
1209 1222 # Any other backend, use plain Tk
1210 1223 th_mode = 'tkthread'
1211 1224
1212 1225 return mpl_shell[th_mode]
1213 1226 else:
1214 1227 # No pylab requested, just plain threads
1215 1228 try:
1216 1229 th_mode = special_opts.pop()
1217 1230 except KeyError:
1218 1231 th_mode = 'tkthread'
1219 1232 return th_shell[th_mode]
1220 1233
1221 1234
1222 1235 # This is the one which should be called by external code.
1223 1236 def start(user_ns = None):
1224 1237 """Return a running shell instance, dealing with threading options.
1225 1238
1226 1239 This is a factory function which will instantiate the proper IPython shell
1227 1240 based on the user's threading choice. Such a selector is needed because
1228 1241 different GUI toolkits require different thread handling details."""
1229 1242
1230 1243 shell = _select_shell(sys.argv)
1231 1244 return shell(user_ns = user_ns)
1232 1245
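# --- Illustrative aside (not part of the original file) ----------------------
# A hedged example of how external code is expected to drive the factory above,
# assuming this module is installed as IPython.Shell (the IPython 0.x layout):
def _example_start():
    import IPython.Shell
    ipshell = IPython.Shell.start()   # picks the shell class from sys.argv
    ipshell.mainloop()                # runs until the user exits the shell
# ------------------------------------------------------------------------------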
1233 1246 # Some aliases for backwards compatibility
1234 1247 IPythonShell = IPShell
1235 1248 IPythonShellEmbed = IPShellEmbed
1236 1249 #************************ End of file <Shell.py> ***************************
@@ -1,43 +1,45
1 1 # -*- Mode: Shell-Script -*- Not really, but shows comments correctly
2 2 #***************************************************************************
3 3 #
4 4 # Configuration file for ipython -- ipythonrc format
5 5 #
6 6 # The format of this file is one of 'key value' lines.
7 7 # Lines containing only whitespace at the beginning and then a # are ignored
8 8 # as comments. But comments can NOT be put on lines with data.
9 9 #***************************************************************************
10 10
11 11 # If this file is found in the user's ~/.ipython directory as
12 12 # ipythonrc-physics, it can be loaded by calling passing the '-profile
13 13 # physics' (or '-p physics') option to IPython.
14 14
15 15 # This profile loads modules useful for doing interactive calculations with
16 16 # physical quantities (with units). It relies on modules from Konrad Hinsen's
17 17 # ScientificPython (http://dirac.cnrs-orleans.fr/ScientificPython/)
18 18
19 19 # First load basic user configuration
20 20 include ipythonrc
21 21
22 22 # import ...
23 23 # Module with alternate input syntax for PhysicalQuantity objects.
24 24 import_mod IPython.Extensions.PhysicalQInput
25 25
26 26 # from ... import *
27 27 # math CANNOT be imported after PhysicalQInteractive. It will override the
28 28 # functions defined there.
29 29 import_all math IPython.Extensions.PhysicalQInteractive
30 30
31 31 # from ... import ...
32 32 import_some
33 33
34 34 # code
35 35 execute q = PhysicalQuantityInteractive
36 36 execute g = PhysicalQuantityInteractive('9.8 m/s**2')
37 37 execute rad = pi/180.
38 38 execute print '*** q is an alias for PhysicalQuantityInteractive'
39 39 execute print '*** g = 9.8 m/s^2 has been defined'
40 40 execute print '*** rad = pi/180 has been defined'
41 execute import ipy_constants as C
42 execute print '*** C is the physical constants module'
41 43
42 44 # Files to execute
43 45 execfile
@@ -1,637 +1,640
1 1 """Word completion for IPython.
2 2
3 3 This module is a fork of the rlcompleter module in the Python standard
4 4 library. The original enhancements made to rlcompleter have been sent
5 5 upstream and were accepted as of Python 2.3, but we need a lot more
6 6 functionality specific to IPython, so this module will continue to live as an
7 7 IPython-specific utility.
8 8
9 9 ---------------------------------------------------------------------------
10 10 Original rlcompleter documentation:
11 11
12 12 This requires the latest extension to the readline module. The completer
13 13 completes keywords, built-ins and globals in __main__; when completing
14 14 NAME.NAME..., it evaluates (!) the expression up to the last dot and
15 15 completes its attributes.
16 16
17 17 It's very cool to do "import string" type "string.", hit the
18 18 completion key (twice), and see the list of names defined by the
19 19 string module!
20 20
21 21 Tip: to use the tab key as the completion key, call
22 22
23 23 readline.parse_and_bind("tab: complete")
24 24
25 25 Notes:
26 26
27 27 - Exceptions raised by the completer function are *ignored* (and
28 28 generally cause the completion to fail). This is a feature -- since
29 29 readline sets the tty device in raw (or cbreak) mode, printing a
30 30 traceback wouldn't work well without some complicated hoopla to save,
31 31 reset and restore the tty state.
32 32
33 33 - The evaluation of the NAME.NAME... form may cause arbitrary
34 34 application defined code to be executed if an object with a
35 35 __getattr__ hook is found. Since it is the responsibility of the
36 36 application (or the user) to enable this feature, I consider this an
37 37 acceptable risk. More complicated expressions (e.g. function calls or
38 38 indexing operations) are *not* evaluated.
39 39
40 40 - GNU readline is also used by the built-in functions input() and
41 41 raw_input(), and thus these also benefit/suffer from the completer
42 42 features. Clearly an interactive application can benefit by
43 43 specifying its own completer function and using raw_input() for all
44 44 its input.
45 45
46 46 - When the original stdin is not a tty device, GNU readline is never
47 47 used, and this module (and the readline module) are silently inactive.
48 48
49 49 """
50 50
51 51 #*****************************************************************************
52 52 #
53 53 # Since this file is essentially a minimally modified copy of the rlcompleter
54 54 # module which is part of the standard Python distribution, I assume that the
55 55 # proper procedure is to maintain its copyright as belonging to the Python
56 56 # Software Foundation (in addition to my own, for all new code).
57 57 #
58 58 # Copyright (C) 2001 Python Software Foundation, www.python.org
59 59 # Copyright (C) 2001-2006 Fernando Perez. <fperez@colorado.edu>
60 60 #
61 61 # Distributed under the terms of the BSD License. The full license is in
62 62 # the file COPYING, distributed as part of this software.
63 63 #
64 64 #*****************************************************************************
65 65
66 66 import __builtin__
67 67 import __main__
68 68 import glob
69 69 import keyword
70 70 import os
71 71 import re
72 72 import shlex
73 73 import sys
74 74 import IPython.rlineimpl as readline
75 75 import itertools
76 76 from IPython.ipstruct import Struct
77 77 from IPython import ipapi
78 78 from IPython import generics
79 79 import types
80 80
81 81 # Python 2.4 offers sets as a builtin
82 82 try:
83 83 set()
84 84 except NameError:
85 85 from sets import Set as set
86 86
87 87 from IPython.genutils import debugx, dir2
88 88
89 89 __all__ = ['Completer','IPCompleter']
90 90
91 91 class Completer:
92 92 def __init__(self,namespace=None,global_namespace=None):
93 93 """Create a new completer for the command line.
94 94
95 95 Completer([namespace,global_namespace]) -> completer instance.
96 96
97 97 If unspecified, the default namespace where completions are performed
98 98 is __main__ (technically, __main__.__dict__). Namespaces should be
99 99 given as dictionaries.
100 100
101 101 An optional second namespace can be given. This allows the completer
102 102 to handle cases where both the local and global scopes need to be
103 103 distinguished.
104 104
105 105 Completer instances should be used as the completion mechanism of
106 106 readline via the set_completer() call:
107 107
108 108 readline.set_completer(Completer(my_namespace).complete)
109 109 """
110 110
111 111 # Don't bind to namespace quite yet, but flag whether the user wants a
112 112 # specific namespace or to use __main__.__dict__. This will allow us
113 113 # to bind to __main__.__dict__ at completion time, not now.
114 114 if namespace is None:
115 115 self.use_main_ns = 1
116 116 else:
117 117 self.use_main_ns = 0
118 118 self.namespace = namespace
119 119
120 120 # The global namespace, if given, can be bound directly
121 121 if global_namespace is None:
122 122 self.global_namespace = {}
123 123 else:
124 124 self.global_namespace = global_namespace
125 125
126 126 def complete(self, text, state):
127 127 """Return the next possible completion for 'text'.
128 128
129 129 This is called successively with state == 0, 1, 2, ... until it
130 130 returns None. The completion should begin with 'text'.
131 131
132 132 """
133 133 if self.use_main_ns:
134 134 self.namespace = __main__.__dict__
135 135
136 136 if state == 0:
137 137 if "." in text:
138 138 self.matches = self.attr_matches(text)
139 139 else:
140 140 self.matches = self.global_matches(text)
141 141 try:
142 142 return self.matches[state]
143 143 except IndexError:
144 144 return None
145 145
146 146 def global_matches(self, text):
147 147 """Compute matches when text is a simple name.
148 148
149 149 Return a list of all keywords, built-in functions and names currently
150 150 defined in self.namespace or self.global_namespace that match.
151 151
152 152 """
153 153 matches = []
154 154 match_append = matches.append
155 155 n = len(text)
156 156 for lst in [keyword.kwlist,
157 157 __builtin__.__dict__.keys(),
158 158 self.namespace.keys(),
159 159 self.global_namespace.keys()]:
160 160 for word in lst:
161 161 if word[:n] == text and word != "__builtins__":
162 162 match_append(word)
163 163 return matches
164 164
165 165 def attr_matches(self, text):
166 166 """Compute matches when text contains a dot.
167 167
168 168 Assuming the text is of the form NAME.NAME....[NAME], and is
169 169 evaluatable in self.namespace or self.global_namespace, it will be
170 170 evaluated and its attributes (as revealed by dir()) are used as
171 171 possible completions. (For class instances, class members are
172 172 also considered.)
173 173
174 174 WARNING: this can still invoke arbitrary C code, if an object
175 175 with a __getattr__ hook is evaluated.
176 176
177 177 """
178 178 import re
179 179
180 180 # Another option, seems to work great. Catches things like ''.<tab>
181 181 m = re.match(r"(\S+(\.\w+)*)\.(\w*)$", text)
182 182
183 183 if not m:
184 184 return []
185 185
186 186 expr, attr = m.group(1, 3)
187 187 try:
188 188 obj = eval(expr, self.namespace)
189 189 except:
190 190 try:
191 191 obj = eval(expr, self.global_namespace)
192 192 except:
193 193 return []
194 194
195 195 words = dir2(obj)
196 196
197 197 try:
198 198 words = generics.complete_object(obj, words)
199 199 except ipapi.TryNext:
200 200 pass
201 201 # Build match list to return
202 202 n = len(attr)
203 203 res = ["%s.%s" % (expr, w) for w in words if w[:n] == attr ]
204 204 return res
205 205
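# --- Illustrative aside (not part of the original file) ----------------------
# A hedged example of wiring the Completer above into readline, following the
# set_completer() recipe in the class docstring; the namespace contents are
# invented for illustration.
def _example_completer():
    import readline
    my_namespace = {'spam': 42, 'spam_and_eggs': None}
    readline.set_completer(Completer(my_namespace).complete)
    readline.parse_and_bind('tab: complete')
    # At an interactive prompt, typing "sp" followed by Tab would now offer
    # 'spam' and 'spam_and_eggs' from my_namespace.
# ------------------------------------------------------------------------------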
206 206 class IPCompleter(Completer):
207 207 """Extension of the completer class with IPython-specific features"""
208 208
209 209 def __init__(self,shell,namespace=None,global_namespace=None,
210 210 omit__names=0,alias_table=None):
211 211 """IPCompleter() -> completer
212 212
213 213 Return a completer object suitable for use by the readline library
214 214 via readline.set_completer().
215 215
216 216 Inputs:
217 217
218 218 - shell: a pointer to the ipython shell itself. This is needed
219 219 because this completer knows about magic functions, and those can
220 220 only be accessed via the ipython instance.
221 221
222 222 - namespace: an optional dict where completions are performed.
223 223
224 224 - global_namespace: secondary optional dict for completions, to
225 225 handle cases (such as IPython embedded inside functions) where
226 226 both Python scopes are visible.
227 227
228 228 - The optional omit__names parameter sets the completer to omit the
229 229 'magic' names (__magicname__) for python objects unless the text
230 230 to be completed explicitly starts with one or more underscores.
231 231
232 232 - If alias_table is supplied, it should be a dictionary of aliases
233 233 to complete. """
234 234
235 235 Completer.__init__(self,namespace,global_namespace)
236 236 self.magic_prefix = shell.name+'.magic_'
237 237 self.magic_escape = shell.ESC_MAGIC
238 238 self.readline = readline
239 239 delims = self.readline.get_completer_delims()
240 240 delims = delims.replace(self.magic_escape,'')
241 241 self.readline.set_completer_delims(delims)
242 242 self.get_line_buffer = self.readline.get_line_buffer
243 243 self.get_endidx = self.readline.get_endidx
244 244 self.omit__names = omit__names
245 245 self.merge_completions = shell.rc.readline_merge_completions
246 246 if alias_table is None:
247 247 alias_table = {}
248 248 self.alias_table = alias_table
249 249 # Regexp to split filenames with spaces in them
250 250 self.space_name_re = re.compile(r'([^\\] )')
251 251 # Hold a local ref. to glob.glob for speed
252 252 self.glob = glob.glob
253 253
254 254 # Determine if we are running on 'dumb' terminals, like (X)Emacs
255 255 # buffers, to avoid completion problems.
256 256 term = os.environ.get('TERM','xterm')
257 257 self.dumb_terminal = term in ['dumb','emacs']
258 258
259 259 # Special handling of backslashes needed in win32 platforms
260 260 if sys.platform == "win32":
261 261 self.clean_glob = self._clean_glob_win32
262 262 else:
263 263 self.clean_glob = self._clean_glob
264 264 self.matchers = [self.python_matches,
265 265 self.file_matches,
266 266 self.alias_matches,
267 267 self.python_func_kw_matches]
268 268
269 269
270 270 # Code contributed by Alex Schmolck, for ipython/emacs integration
271 271 def all_completions(self, text):
272 272 """Return all possible completions for the benefit of emacs."""
273 273
274 274 completions = []
275 275 comp_append = completions.append
276 276 try:
277 277 for i in xrange(sys.maxint):
278 278 res = self.complete(text, i)
279 279
280 280 if not res: break
281 281
282 282 comp_append(res)
283 283 #XXX workaround for ``notDefined.<tab>``
284 284 except NameError:
285 285 pass
286 286 return completions
287 287 # /end Alex Schmolck code.
288 288
289 289 def _clean_glob(self,text):
290 290 return self.glob("%s*" % text)
291 291
292 292 def _clean_glob_win32(self,text):
293 293 return [f.replace("\\","/")
294 294 for f in self.glob("%s*" % text)]
295 295
296 296 def file_matches(self, text):
297 297 """Match filenames, expanding ~USER type strings.
298 298
299 299 Most of the seemingly convoluted logic in this completer is an
300 300 attempt to handle filenames with spaces in them. And yet it's not
301 301 quite perfect, because Python's readline doesn't expose all of the
302 302 GNU readline details needed for this to be done correctly.
303 303
304 304 For a filename with a space in it, the printed completions will be
305 305 only the parts after what's already been typed (instead of the
306 306 full completions, as is normally done). I don't think with the
307 307 current (as of Python 2.3) Python readline it's possible to do
308 308 better."""
309 309
310 310 #print 'Completer->file_matches: <%s>' % text # dbg
311 311
312 312 # chars that require escaping with backslash - i.e. chars
313 313 # that readline treats incorrectly as delimiters, but we
314 314 # don't want to treat as delimiters in filename matching
315 315 # when escaped with backslash
316 316
317 protectables = ' '
317 if sys.platform == 'win32':
318 protectables = ' '
319 else:
320 protectables = ' ()'
318 321
319 322 if text.startswith('!'):
320 323 text = text[1:]
321 324 text_prefix = '!'
322 325 else:
323 326 text_prefix = ''
324 327
325 328 def protect_filename(s):
326 329 return "".join([(ch in protectables and '\\' + ch or ch)
327 330 for ch in s])
328 331
329 332 def single_dir_expand(matches):
330 333 "Recursively expand match lists containing a single dir."
331 334
332 335 if len(matches) == 1 and os.path.isdir(matches[0]):
333 336 # Takes care of links to directories also. Use '/'
334 337 # explicitly, even under Windows, so that name completions
335 338 # don't end up escaped.
336 339 d = matches[0]
337 340 if d[-1] in ['/','\\']:
338 341 d = d[:-1]
339 342
340 343 subdirs = os.listdir(d)
341 344 if subdirs:
342 345 matches = [ (d + '/' + p) for p in subdirs]
343 346 return single_dir_expand(matches)
344 347 else:
345 348 return matches
346 349 else:
347 350 return matches
348 351
349 352 lbuf = self.lbuf
350 353 open_quotes = 0 # track strings with open quotes
351 354 try:
352 355 lsplit = shlex.split(lbuf)[-1]
353 356 except ValueError:
354 357 # typically an unmatched ", or backslash without escaped char.
355 358 if lbuf.count('"')==1:
356 359 open_quotes = 1
357 360 lsplit = lbuf.split('"')[-1]
358 361 elif lbuf.count("'")==1:
359 362 open_quotes = 1
360 363 lsplit = lbuf.split("'")[-1]
361 364 else:
362 365 return []
363 366 except IndexError:
364 367 # tab pressed on empty line
365 368 lsplit = ""
366 369
367 370 if lsplit != protect_filename(lsplit):
368 371 # if protectables are found, do matching on the whole escaped
369 372 # name
370 373 has_protectables = 1
371 374 text0,text = text,lsplit
372 375 else:
373 376 has_protectables = 0
374 377 text = os.path.expanduser(text)
375 378
376 379 if text == "":
377 380 return [text_prefix + protect_filename(f) for f in self.glob("*")]
378 381
379 382 m0 = self.clean_glob(text.replace('\\',''))
380 383 if has_protectables:
381 384 # If we had protectables, we need to revert our changes to the
382 385 # beginning of filename so that we don't double-write the part
383 386 # of the filename we have so far
384 387 len_lsplit = len(lsplit)
385 388 matches = [text_prefix + text0 +
386 389 protect_filename(f[len_lsplit:]) for f in m0]
387 390 else:
388 391 if open_quotes:
389 392 # if we have a string with an open quote, we don't need to
390 393 # protect the names at all (and we _shouldn't_, as it
391 394 # would cause bugs when the filesystem call is made).
392 395 matches = m0
393 396 else:
394 397 matches = [text_prefix +
395 398 protect_filename(f) for f in m0]
396 399
397 400 #print 'mm',matches # dbg
398 401 return single_dir_expand(matches)
399 402
400 403 def alias_matches(self, text):
401 404 """Match internal system aliases"""
402 405 #print 'Completer->alias_matches:',text,'lb',self.lbuf # dbg
403 406
404 407 # if we are not in the first 'item', alias matching
405 408 # doesn't make sense - unless we are starting with the 'sudo' command.
406 409 if ' ' in self.lbuf.lstrip() and not self.lbuf.lstrip().startswith('sudo'):
407 410 return []
408 411 text = os.path.expanduser(text)
409 412 aliases = self.alias_table.keys()
410 413 if text == "":
411 414 return aliases
412 415 else:
413 416 return [alias for alias in aliases if alias.startswith(text)]
414 417
415 418 def python_matches(self,text):
416 419 """Match attributes or global python names"""
417 420
418 421 #print 'Completer->python_matches, txt=<%s>' % text # dbg
419 422 if "." in text:
420 423 try:
421 424 matches = self.attr_matches(text)
422 425 if text.endswith('.') and self.omit__names:
423 426 if self.omit__names == 1:
424 427 # true if txt is _not_ a __ name, false otherwise:
425 428 no__name = (lambda txt:
426 429 re.match(r'.*\.__.*?__',txt) is None)
427 430 else:
428 431 # true if txt is _not_ a _ name, false otherwise:
429 432 no__name = (lambda txt:
430 433 re.match(r'.*\._.*?',txt) is None)
431 434 matches = filter(no__name, matches)
432 435 except NameError:
433 436 # catches <undefined attributes>.<tab>
434 437 matches = []
435 438 else:
436 439 matches = self.global_matches(text)
437 440 # this is so completion finds magics when automagic is on:
438 441 if (matches == [] and
439 442 not text.startswith(os.sep) and
440 443 not ' ' in self.lbuf):
441 444 matches = self.attr_matches(self.magic_prefix+text)
442 445 return matches
443 446
444 447 def _default_arguments(self, obj):
445 448 """Return the list of default arguments of obj if it is callable,
446 449 or empty list otherwise."""
447 450
448 451 if not (inspect.isfunction(obj) or inspect.ismethod(obj)):
449 452 # for classes, check for __init__,__new__
450 453 if inspect.isclass(obj):
451 454 obj = (getattr(obj,'__init__',None) or
452 455 getattr(obj,'__new__',None))
453 456 # for all others, check if they are __call__able
454 457 elif hasattr(obj, '__call__'):
455 458 obj = obj.__call__
456 459 # XXX: is there a way to handle the builtins ?
457 460 try:
458 461 args,_,_1,defaults = inspect.getargspec(obj)
459 462 if defaults:
460 463 return args[-len(defaults):]
461 464 except TypeError: pass
462 465 return []
463 466
464 467 def python_func_kw_matches(self,text):
465 468 """Match named parameters (kwargs) of the last open function"""
466 469
467 470 if "." in text: # a parameter cannot be dotted
468 471 return []
469 472 try: regexp = self.__funcParamsRegex
470 473 except AttributeError:
471 474 regexp = self.__funcParamsRegex = re.compile(r'''
472 475 '.*?' | # single quoted strings or
473 476 ".*?" | # double quoted strings or
474 477 \w+ | # identifier
475 478 \S # other characters
476 479 ''', re.VERBOSE | re.DOTALL)
477 480 # 1. find the nearest identifier that comes before an unclosed
478 481 # parenthesis e.g. for "foo (1+bar(x), pa", the candidate is "foo"
479 482 tokens = regexp.findall(self.get_line_buffer())
480 483 tokens.reverse()
481 484 iterTokens = iter(tokens); openPar = 0
482 485 for token in iterTokens:
483 486 if token == ')':
484 487 openPar -= 1
485 488 elif token == '(':
486 489 openPar += 1
487 490 if openPar > 0:
488 491 # found the last unclosed parenthesis
489 492 break
490 493 else:
491 494 return []
492 495 # 2. Concatenate dotted names ("foo.bar" for "foo.bar(x, pa" )
493 496 ids = []
494 497 isId = re.compile(r'\w+$').match
495 498 while True:
496 499 try:
497 500 ids.append(iterTokens.next())
498 501 if not isId(ids[-1]):
499 502 ids.pop(); break
500 503 if not iterTokens.next() == '.':
501 504 break
502 505 except StopIteration:
503 506 break
504 507 # lookup the candidate callable matches either using global_matches
505 508 # or attr_matches for dotted names
506 509 if len(ids) == 1:
507 510 callableMatches = self.global_matches(ids[0])
508 511 else:
509 512 callableMatches = self.attr_matches('.'.join(ids[::-1]))
510 513 argMatches = []
511 514 for callableMatch in callableMatches:
512 515 try: namedArgs = self._default_arguments(eval(callableMatch,
513 516 self.namespace))
514 517 except: continue
515 518 for namedArg in namedArgs:
516 519 if namedArg.startswith(text):
517 520 argMatches.append("%s=" %namedArg)
518 521 return argMatches
519 522
520 523 def dispatch_custom_completer(self,text):
521 524 #print "Custom! '%s' %s" % (text, self.custom_completers) # dbg
522 525 line = self.full_lbuf
523 526 if not line.strip():
524 527 return None
525 528
526 529 event = Struct()
527 530 event.line = line
528 531 event.symbol = text
529 532 cmd = line.split(None,1)[0]
530 533 event.command = cmd
531 534 #print "\ncustom:{%s]\n" % event # dbg
532 535
533 536 # for foo etc, try also to find completer for %foo
534 537 if not cmd.startswith(self.magic_escape):
535 538 try_magic = self.custom_completers.s_matches(
536 539 self.magic_escape + cmd)
537 540 else:
538 541 try_magic = []
539 542
540 543
541 544 for c in itertools.chain(
542 545 self.custom_completers.s_matches(cmd),
543 546 try_magic,
544 547 self.custom_completers.flat_matches(self.lbuf)):
545 548 #print "try",c # dbg
546 549 try:
547 550 res = c(event)
548 551 # first, try case sensitive match
549 552 withcase = [r for r in res if r.startswith(text)]
550 553 if withcase:
551 554 return withcase
552 555 # if none, then case insensitive ones are ok too
553 556 return [r for r in res if r.lower().startswith(text.lower())]
554 557 except ipapi.TryNext:
555 558 pass
556 559
557 560 return None
558 561
559 562 def complete(self, text, state,line_buffer=None):
560 563 """Return the next possible completion for 'text'.
561 564
562 565 This is called successively with state == 0, 1, 2, ... until it
563 566 returns None. The completion should begin with 'text'.
564 567
565 568 :Keywords:
566 569 - line_buffer: string
567 570 If not given, the completer attempts to obtain the current line buffer
568 571 via readline. This keyword allows clients which are requesting for
569 572 text completions in non-readline contexts to inform the completer of
570 573 the entire text.
571 574 """
572 575
573 576 #print '\n*** COMPLETE: <%s> (%s)' % (text,state) # dbg
574 577
575 578 # if there is only a tab on a line with only whitespace, instead
576 579 # of the mostly useless 'do you want to see all million
577 580 # completions' message, just do the right thing and give the user
578 581 # his tab! Incidentally, this enables pasting of tabbed text from
579 582 # an editor (as long as autoindent is off).
580 583
581 584 # It should be noted that at least pyreadline still shows
582 585 # file completions - is there a way around it?
583 586
584 587 # don't apply this on 'dumb' terminals, such as emacs buffers, so we
585 588 # don't interfere with their own tab-completion mechanism.
586 589 if line_buffer is None:
587 590 self.full_lbuf = self.get_line_buffer()
588 591 else:
589 592 self.full_lbuf = line_buffer
590 593
591 594 if not (self.dumb_terminal or self.full_lbuf.strip()):
592 595 self.readline.insert_text('\t')
593 596 return None
594 597
595 598 magic_escape = self.magic_escape
596 599 magic_prefix = self.magic_prefix
597 600
598 601 self.lbuf = self.full_lbuf[:self.get_endidx()]
599 602
600 603 try:
601 604 if text.startswith(magic_escape):
602 605 text = text.replace(magic_escape,magic_prefix)
603 606 elif text.startswith('~'):
604 607 text = os.path.expanduser(text)
605 608 if state == 0:
606 609 custom_res = self.dispatch_custom_completer(text)
607 610 if custom_res is not None:
608 611 # did custom completers produce something?
609 612 self.matches = custom_res
610 613 else:
611 614 # Extend the list of completions with the results of each
612 615 # matcher, so we return results to the user from all
613 616 # namespaces.
614 617 if self.merge_completions:
615 618 self.matches = []
616 619 for matcher in self.matchers:
617 620 self.matches.extend(matcher(text))
618 621 else:
619 622 for matcher in self.matchers:
620 623 self.matches = matcher(text)
621 624 if self.matches:
622 625 break
623 626 def uniq(alist):
624 627 set = {}
625 628 return [set.setdefault(e,e) for e in alist if e not in set]
626 629 self.matches = uniq(self.matches)
627 630 try:
628 631 ret = self.matches[state].replace(magic_prefix,magic_escape)
629 632 return ret
630 633 except IndexError:
631 634 return None
632 635 except:
633 636 #from IPython.ultraTB import AutoFormattedTB; # dbg
634 637 #tb=AutoFormattedTB('Verbose');tb() #dbg
635 638
636 639 # If completion fails, don't annoy the user.
637 640 return None
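
For reference, complete() implements readline's incremental protocol: it is called
repeatedly with state = 0, 1, 2, ... and returns one match per call until it
returns None. A minimal sketch of driving it by hand (the `completer` instance and
the input text below are illustrative, not part of this changeset):

    # Collect every completion for a prefix by walking the readline-style
    # state protocol until the completer signals exhaustion with None.
    def all_completions(completer, text, line_buffer=None):
        matches = []
        state = 0
        while True:
            m = completer.complete(text, state, line_buffer=line_buffer)
            if m is None:
                break
            matches.append(m)
            state += 1
        return matches

    # e.g. all_completions(completer, 'im', line_buffer='im') returns whatever
    # the active matchers (file, alias, python, custom, ...) produce for 'im'.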
@@ -1,104 +1,102
1 1 # encoding: utf-8
2 2
3 3 """This is the official entry point to IPython's configuration system. """
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 import os
19 from IPython.config.cutils import get_home_dir, get_ipython_dir
19 from os.path import join as pjoin
20
21 from IPython.genutils import get_home_dir, get_ipython_dir
20 22 from IPython.external.configobj import ConfigObj
21 23
22 # Traitlets config imports
23 from IPython.config import traitlets
24 from IPython.config.config import *
25 from traitlets import *
26 24
27 25 class ConfigObjManager(object):
28 26
29 27 def __init__(self, configObj, filename):
30 28 self.current = configObj
31 29 self.current.indent_type = ' '
32 30 self.filename = filename
33 31 # self.write_default_config_file()
34 32
35 33 def get_config_obj(self):
36 34 return self.current
37 35
38 36 def update_config_obj(self, newConfig):
39 37 self.current.merge(newConfig)
40 38
41 39 def update_config_obj_from_file(self, filename):
42 40 newConfig = ConfigObj(filename, file_error=False)
43 41 self.current.merge(newConfig)
44 42
45 43 def update_config_obj_from_default_file(self, ipythondir=None):
46 44 fname = self.resolve_file_path(self.filename, ipythondir)
47 45 self.update_config_obj_from_file(fname)
48 46
49 47 def write_config_obj_to_file(self, filename):
50 48 f = open(filename, 'w')
51 49 self.current.write(f)
52 50 f.close()
53 51
54 52 def write_default_config_file(self):
55 53 ipdir = get_ipython_dir()
56 fname = ipdir + '/' + self.filename
54 fname = pjoin(ipdir, self.filename)
57 55 if not os.path.isfile(fname):
58 56 print "Writing the configuration file to: " + fname
59 57 self.write_config_obj_to_file(fname)
60 58
61 59 def _import(self, key):
62 60 package = '.'.join(key.split('.')[0:-1])
63 61 obj = key.split('.')[-1]
64 62 execString = 'from %s import %s' % (package, obj)
65 63 exec execString
66 64 exec 'temp = %s' % obj
67 65 return temp
68 66
69 67 def resolve_file_path(self, filename, ipythondir = None):
70 68 """Resolve filenames into absolute paths.
71 69
72 70 This function looks in the following directories in order:
73 71
74 72 1. In the current working directory or by absolute path with ~ expanded
75 73 2. In ipythondir if that is set
76 74 3. In the IPYTHONDIR environment variable if it exists
77 75 4. In the ~/.ipython directory
78 76
79 77 Note: The IPYTHONDIR is also used by the trunk version of IPython so
80 78 changing it will affect it as well.
81 79 """
82 80
83 81 # In cwd or by absolute path with ~ expanded
84 82 trythis = os.path.expanduser(filename)
85 83 if os.path.isfile(trythis):
86 84 return trythis
87 85
88 86 # In ipythondir if it is set
89 87 if ipythondir is not None:
90 trythis = ipythondir + '/' + filename
88 trythis = pjoin(ipythondir, filename)
91 89 if os.path.isfile(trythis):
92 90 return trythis
93 91
94 trythis = get_ipython_dir() + '/' + filename
92 trythis = pjoin(get_ipython_dir(), filename)
95 93 if os.path.isfile(trythis):
96 94 return trythis
97 95
98 96 return None
99 97
100 98
101 99
102 100
103 101
104 102
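
The lookup order documented in resolve_file_path() can be exercised directly; a
hedged sketch follows (the file name 'ipythonrc.ini' and the import path are
illustrative assumptions, not taken from this changeset):

    from IPython.external.configobj import ConfigObj
    from IPython.config.api import ConfigObjManager   # assumed import path

    manager = ConfigObjManager(ConfigObj(), 'ipythonrc.ini')
    # Tries, in order: cwd/absolute path with ~ expanded, the explicit
    # ipythondir argument, then get_ipython_dir(); returns None if not found.
    path = manager.resolve_file_path('ipythonrc.ini', ipythondir=None)
    if path is not None:
        manager.update_config_obj_from_file(path)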
@@ -1,99 +1,34
1 1 # encoding: utf-8
2 2
3 3 """Configuration-related utilities for all IPython."""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 import os
19 19 import sys
20 20
21 21 #---------------------------------------------------------------------------
22 22 # Normal code begins
23 23 #---------------------------------------------------------------------------
24 24
25 class HomeDirError(Exception):
26 pass
27
28 def get_home_dir():
29 """Return the closest possible equivalent to a 'home' directory.
30
31 We first try $HOME. Absent that, on NT it's $HOMEDRIVE\$HOMEPATH.
32
33 Currently only Posix and NT are implemented, a HomeDirError exception is
34 raised for all other OSes. """
35
36 isdir = os.path.isdir
37 env = os.environ
38 try:
39 homedir = env['HOME']
40 if not isdir(homedir):
41 # in case a user stuck some string which does NOT resolve to a
42 # valid path, it's as good as if we hadn't foud it
43 raise KeyError
44 return homedir
45 except KeyError:
46 if os.name == 'posix':
47 raise HomeDirError,'undefined $HOME, IPython can not proceed.'
48 elif os.name == 'nt':
49 # For some strange reason, win9x returns 'nt' for os.name.
50 try:
51 homedir = os.path.join(env['HOMEDRIVE'],env['HOMEPATH'])
52 if not isdir(homedir):
53 homedir = os.path.join(env['USERPROFILE'])
54 if not isdir(homedir):
55 raise HomeDirError
56 return homedir
57 except:
58 try:
59 # Use the registry to get the 'My Documents' folder.
60 import _winreg as wreg
61 key = wreg.OpenKey(wreg.HKEY_CURRENT_USER,
62 "Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
63 homedir = wreg.QueryValueEx(key,'Personal')[0]
64 key.Close()
65 if not isdir(homedir):
66 e = ('Invalid "Personal" folder registry key '
67 'typically "My Documents".\n'
68 'Value: %s\n'
69 'This is not a valid directory on your system.' %
70 homedir)
71 raise HomeDirError(e)
72 return homedir
73 except HomeDirError:
74 raise
75 except:
76 return 'C:\\'
77 elif os.name == 'dos':
78 # Desperate, may do absurd things in classic MacOS. May work under DOS.
79 return 'C:\\'
80 else:
81 raise HomeDirError,'support for your operating system not implemented.'
82
83 def get_ipython_dir():
84 ipdir_def = '.ipython'
85 home_dir = get_home_dir()
86 ipdir = os.path.abspath(os.environ.get('IPYTHONDIR',
87 os.path.join(home_dir,ipdir_def)))
88 return ipdir
89
90 25 def import_item(key):
91 26 """
92 27 Import and return bar given the string foo.bar.
93 28 """
94 29 package = '.'.join(key.split('.')[0:-1])
95 30 obj = key.split('.')[-1]
96 31 execString = 'from %s import %s' % (package, obj)
97 32 exec execString
98 33 exec 'temp = %s' % obj
99 34 return temp
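
import_item() is a small exec-based helper: given the dotted string 'foo.bar' it
performs `from foo import bar` and returns bar. A minimal illustration (standard
library names only):

    from IPython.config.cutils import import_item

    path_mod = import_item('os.path')   # equivalent to: from os import path
    print path_mod.join('a', 'b')       # -> a/b on posix systems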
@@ -1,526 +1,526
1 1 """Module for interactive demos using IPython.
2 2
3 3 This module implements a few classes for running Python scripts interactively
4 4 in IPython for demonstrations. With very simple markup (a few tags in
5 5 comments), you can control points where the script stops executing and returns
6 6 control to IPython.
7 7
8 8
9 9 Provided classes
10 10 ================
11 11
12 12 The classes are (see their docstrings for further details):
13 13
14 14 - Demo: pure python demos
15 15
16 16 - IPythonDemo: demos with input to be processed by IPython as if it had been
17 17 typed interactively (so magics work, as well as any other special syntax you
18 18 may have added via input prefilters).
19 19
20 20 - LineDemo: single-line version of the Demo class. These demos are executed
21 21 one line at a time, and require no markup.
22 22
23 23 - IPythonLineDemo: IPython version of the LineDemo class (the demo is
24 24 executed a line at a time, but processed via IPython).
25 25
26 26 - ClearMixin: mixin to make Demo classes with less visual clutter. It
27 27 declares an empty marquee and a pre_cmd that clears the screen before each
28 28 block (see Subclassing below).
29 29
30 30 - ClearDemo, ClearIPDemo: mixin-enabled versions of the Demo and IPythonDemo
31 31 classes.
32 32
33 33
34 34 Subclassing
35 35 ===========
36 36
37 37 The classes here all include a few methods meant to make customization by
38 38 subclassing more convenient. Their docstrings below have some more details:
39 39
40 40 - marquee(): generates a marquee to provide visible on-screen markers at each
41 41 block start and end.
42 42
43 43 - pre_cmd(): run right before the execution of each block.
44 44
45 45 - post_cmd(): run right after the execution of each block. If the block
46 46 raises an exception, this is NOT called.
47 47
48 48
49 49 Operation
50 50 =========
51 51
52 52 The file is run in its own empty namespace (though you can pass it a string of
53 53 arguments as if in a command line environment, and it will see those as
54 54 sys.argv). But at each stop, the global IPython namespace is updated with the
55 55 current internal demo namespace, so you can work interactively with the data
56 56 accumulated so far.
57 57
58 58 By default, each block of code is printed (with syntax highlighting) before
59 59 executing it and you have to confirm execution. This is intended to show the
60 60 code to an audience first so you can discuss it, and only proceed with
61 61 execution once you agree. There are a few tags which allow you to modify this
62 62 behavior.
63 63
64 64 The supported tags are:
65 65
66 66 # <demo> stop
67 67
68 68 Defines block boundaries, the points where IPython stops execution of the
69 69 file and returns to the interactive prompt.
70 70
71 71 You can optionally mark the stop tag with extra dashes before and after the
72 72 word 'stop', to help visually distinguish the blocks in a text editor:
73 73
74 74 # <demo> --- stop ---
75 75
76 76
77 77 # <demo> silent
78 78
79 79 Make a block execute silently (and hence automatically). Typically used in
80 80 cases where you have some boilerplate or initialization code which you need
81 81 executed but do not want to be seen in the demo.
82 82
83 83 # <demo> auto
84 84
85 85 Make a block execute automatically, but still print it. Useful for
86 86 simple code which does not warrant discussion, since it avoids the extra
87 87 manual confirmation.
88 88
89 89 # <demo> auto_all
90 90
91 91 This tag can _only_ be in the first block, and if given it overrides the
92 92 individual auto tags to make the whole demo fully automatic (no block asks
93 93 for confirmation). It can also be given at creation time (or the attribute
94 94 set later) to override what's in the file.
95 95
96 96 While _any_ python file can be run as a Demo instance, if there are no stop
97 97 tags the whole file will run in a single block (no different than calling
98 98 first %pycat and then %run). The minimal markup to make this useful is to
99 99 place a set of stop tags; the other tags are only there to let you fine-tune
100 100 the execution.
101 101
102 102 This is probably best explained with the simple example file below. You can
103 103 copy this into a file named ex_demo.py, and try running it via:
104 104
105 105 from IPython.demo import Demo
106 106 d = Demo('ex_demo.py')
107 107 d() <--- Call the d object (omit the parens if you have autocall set to 2).
108 108
109 109 Each time you call the demo object, it runs the next block. The demo object
110 110 has a few useful methods for navigation, like again(), edit(), jump(), seek()
111 111 and back(). It can be reset for a new run via reset() or reloaded from disk
112 112 (in case you've edited the source) via reload(). See their docstrings below.
113 113
114 114
115 115 Example
116 116 =======
117 117
118 118 The following is a very simple example of a valid demo file.
119 119
120 120 #################### EXAMPLE DEMO <ex_demo.py> ###############################
121 121 '''A simple interactive demo to illustrate the use of IPython's Demo class.'''
122 122
123 123 print 'Hello, welcome to an interactive IPython demo.'
124 124
125 125 # The mark below defines a block boundary, which is a point where IPython will
126 126 # stop execution and return to the interactive prompt. The dashes are actually
127 127 # optional and used only as a visual aid to clearly separate blocks while
128 128 # editing the demo code.
129 129 # <demo> stop
130 130
131 131 x = 1
132 132 y = 2
133 133
134 134 # <demo> stop
135 135
136 136 # the mark below makes this block silent
137 137 # <demo> silent
138 138
139 139 print 'This is a silent block, which gets executed but not printed.'
140 140
141 141 # <demo> stop
142 142 # <demo> auto
143 143 print 'This is an automatic block.'
144 144 print 'It is executed without asking for confirmation, but printed.'
145 145 z = x+y
146 146
147 147 print 'z=',z
148 148
149 149 # <demo> stop
150 150 # This is just another normal block.
151 151 print 'z is now:', z
152 152
153 153 print 'bye!'
154 154 ################### END EXAMPLE DEMO <ex_demo.py> ############################
155 155 """
156 156
157 157 #*****************************************************************************
158 158 # Copyright (C) 2005-2006 Fernando Perez. <Fernando.Perez@colorado.edu>
159 159 #
160 160 # Distributed under the terms of the BSD License. The full license is in
161 161 # the file COPYING, distributed as part of this software.
162 162 #
163 163 #*****************************************************************************
164 164
165 165 import exceptions
166 166 import os
167 167 import re
168 168 import shlex
169 169 import sys
170 170
171 171 from IPython.PyColorize import Parser
172 172 from IPython.genutils import marquee, file_read, file_readlines
173 173
174 174 __all__ = ['Demo','IPythonDemo','LineDemo','IPythonLineDemo','DemoError']
175 175
176 176 class DemoError(exceptions.Exception): pass
177 177
178 178 def re_mark(mark):
179 179 return re.compile(r'^\s*#\s+<demo>\s+%s\s*$' % mark,re.MULTILINE)
180 180
181 181 class Demo(object):
182 182
183 re_stop = re_mark('-?\s?stop\s?-?')
183 re_stop = re_mark('-*\s?stop\s?-*')
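    # '-*' (rather than the previous '-?') accepts any number of decorative
    # dashes around 'stop', so markers such as '# <demo> --- stop ---' from
    # the module docstring are matched.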
184 184 re_silent = re_mark('silent')
185 185 re_auto = re_mark('auto')
186 186 re_auto_all = re_mark('auto_all')
187 187
188 188 def __init__(self,fname,arg_str='',auto_all=None):
189 189 """Make a new demo object. To run the demo, simply call the object.
190 190
191 191 See the module docstring for full details and an example (you can use
192 192 IPython.Demo? in IPython to see it).
193 193
194 194 Inputs:
195 195
196 196 - fname = filename.
197 197
198 198 Optional inputs:
199 199
200 200 - arg_str(''): a string of arguments, internally converted to a list
201 201 just like sys.argv, so the demo script can see a similar
202 202 environment.
203 203
204 204 - auto_all(None): global flag to run all blocks automatically without
205 205 confirmation. This attribute overrides the block-level tags and
206 206 applies to the whole demo. It is an attribute of the object, and
207 207 can be changed at runtime simply by reassigning it to a boolean
208 208 value.
209 209 """
210 210
211 211 self.fname = fname
212 212 self.sys_argv = [fname] + shlex.split(arg_str)
213 213 self.auto_all = auto_all
214 214
215 215 # get a few things from ipython. While it's a bit ugly design-wise,
216 216 # it ensures that things like color scheme and the like are always in
217 217 # sync with the ipython mode being used. This class is only meant to
218 218 # be used inside ipython anyways, so it's OK.
219 219 self.ip_ns = __IPYTHON__.user_ns
220 220 self.ip_colorize = __IPYTHON__.pycolorize
221 221 self.ip_showtb = __IPYTHON__.showtraceback
222 222 self.ip_runlines = __IPYTHON__.runlines
223 223 self.shell = __IPYTHON__
224 224
225 225 # load user data and initialize data structures
226 226 self.reload()
227 227
228 228 def reload(self):
229 229 """Reload source from disk and initialize state."""
230 230 # read data and parse into blocks
231 231 self.src = file_read(self.fname)
232 232 src_b = [b.strip() for b in self.re_stop.split(self.src) if b]
233 233 self._silent = [bool(self.re_silent.findall(b)) for b in src_b]
234 234 self._auto = [bool(self.re_auto.findall(b)) for b in src_b]
235 235
236 236 # if auto_all is not given (def. None), we read it from the file
237 237 if self.auto_all is None:
238 238 self.auto_all = bool(self.re_auto_all.findall(src_b[0]))
239 239 else:
240 240 self.auto_all = bool(self.auto_all)
241 241
242 242 # Clean the sources from all markup so it doesn't get displayed when
243 243 # running the demo
244 244 src_blocks = []
245 245 auto_strip = lambda s: self.re_auto.sub('',s)
246 246 for i,b in enumerate(src_b):
247 247 if self._auto[i]:
248 248 src_blocks.append(auto_strip(b))
249 249 else:
250 250 src_blocks.append(b)
251 251 # remove the auto_all marker
252 252 src_blocks[0] = self.re_auto_all.sub('',src_blocks[0])
253 253
254 254 self.nblocks = len(src_blocks)
255 255 self.src_blocks = src_blocks
256 256
257 257 # also build syntax-highlighted source
258 258 self.src_blocks_colored = map(self.ip_colorize,self.src_blocks)
259 259
260 260 # ensure clean namespace and seek offset
261 261 self.reset()
262 262
263 263 def reset(self):
264 264 """Reset the namespace and seek pointer to restart the demo"""
265 265 self.user_ns = {}
266 266 self.finished = False
267 267 self.block_index = 0
268 268
269 269 def _validate_index(self,index):
270 270 if index<0 or index>=self.nblocks:
271 271 raise ValueError('invalid block index %s' % index)
272 272
273 273 def _get_index(self,index):
274 274 """Get the current block index, validating and checking status.
275 275
276 276 Returns None if the demo is finished"""
277 277
278 278 if index is None:
279 279 if self.finished:
280 280 print 'Demo finished. Use reset() if you want to rerun it.'
281 281 return None
282 282 index = self.block_index
283 283 else:
284 284 self._validate_index(index)
285 285 return index
286 286
287 287 def seek(self,index):
288 288 """Move the current seek pointer to the given block.
289 289
290 290 You can use negative indices to seek from the end, with identical
291 291 semantics to those of Python lists."""
292 292 if index<0:
293 293 index = self.nblocks + index
294 294 self._validate_index(index)
295 295 self.block_index = index
296 296 self.finished = False
297 297
298 298 def back(self,num=1):
299 299 """Move the seek pointer back num blocks (default is 1)."""
300 300 self.seek(self.block_index-num)
301 301
302 302 def jump(self,num=1):
303 303 """Jump a given number of blocks relative to the current one.
304 304
305 305 The offset can be positive or negative, defaults to 1."""
306 306 self.seek(self.block_index+num)
307 307
308 308 def again(self):
309 309 """Move the seek pointer back one block and re-execute."""
310 310 self.back(1)
311 311 self()
312 312
313 313 def edit(self,index=None):
314 314 """Edit a block.
315 315
316 316 If no number is given, use the last block executed.
317 317
318 318 This edits the in-memory copy of the demo, it does NOT modify the
319 319 original source file. If you want to do that, simply open the file in
320 320 an editor and use reload() when you make changes to the file. This
321 321 method is meant to let you change a block during a demonstration for
322 322 explanatory purposes, without damaging your original script."""
323 323
324 324 index = self._get_index(index)
325 325 if index is None:
326 326 return
327 327 # decrease the index by one (unless we're at the very beginning), so
328 328 # that the default demo.edit() call opens up the block we've last run
329 329 if index>0:
330 330 index -= 1
331 331
332 332 filename = self.shell.mktempfile(self.src_blocks[index])
333 333 self.shell.hooks.editor(filename,1)
334 334 new_block = file_read(filename)
335 335 # update the source and colored block
336 336 self.src_blocks[index] = new_block
337 337 self.src_blocks_colored[index] = self.ip_colorize(new_block)
338 338 self.block_index = index
339 339 # call to run with the newly edited index
340 340 self()
341 341
342 342 def show(self,index=None):
343 343 """Show a single block on screen"""
344 344
345 345 index = self._get_index(index)
346 346 if index is None:
347 347 return
348 348
349 349 print self.marquee('<%s> block # %s (%s remaining)' %
350 350 (self.fname,index,self.nblocks-index-1))
351 351 sys.stdout.write(self.src_blocks_colored[index])
352 352 sys.stdout.flush()
353 353
354 354 def show_all(self):
355 355 """Show entire demo on screen, block by block"""
356 356
357 357 fname = self.fname
358 358 nblocks = self.nblocks
359 359 silent = self._silent
360 360 marquee = self.marquee
361 361 for index,block in enumerate(self.src_blocks_colored):
362 362 if silent[index]:
363 363 print marquee('<%s> SILENT block # %s (%s remaining)' %
364 364 (fname,index,nblocks-index-1))
365 365 else:
366 366 print marquee('<%s> block # %s (%s remaining)' %
367 367 (fname,index,nblocks-index-1))
368 368 print block,
369 369 sys.stdout.flush()
370 370
371 371 def runlines(self,source):
372 372 """Execute a string with one or more lines of code"""
373 373
374 374 exec source in self.user_ns
375 375
376 376 def __call__(self,index=None):
377 377 """run a block of the demo.
378 378
379 379 If index is given, it should be an integer >=1 and <= nblocks. This
380 380 means that the calling convention is one off from typical Python
381 381 lists. The reason for the inconsistency is that the demo always
382 382 prints 'Block n/N', and N is the total, so it would be very odd to use
383 383 zero-indexing here."""
384 384
385 385 index = self._get_index(index)
386 386 if index is None:
387 387 return
388 388 try:
389 389 marquee = self.marquee
390 390 next_block = self.src_blocks[index]
391 391 self.block_index += 1
392 392 if self._silent[index]:
393 393 print marquee('Executing silent block # %s (%s remaining)' %
394 394 (index,self.nblocks-index-1))
395 395 else:
396 396 self.pre_cmd()
397 397 self.show(index)
398 398 if self.auto_all or self._auto[index]:
399 399 print marquee('output:')
400 400 else:
401 401 print marquee('Press <q> to quit, <Enter> to execute...'),
402 402 ans = raw_input().strip()
403 403 if ans:
404 404 print marquee('Block NOT executed')
405 405 return
406 406 try:
407 407 save_argv = sys.argv
408 408 sys.argv = self.sys_argv
409 409 self.runlines(next_block)
410 410 self.post_cmd()
411 411 finally:
412 412 sys.argv = save_argv
413 413
414 414 except:
415 415 self.ip_showtb(filename=self.fname)
416 416 else:
417 417 self.ip_ns.update(self.user_ns)
418 418
419 419 if self.block_index == self.nblocks:
420 420 mq1 = self.marquee('END OF DEMO')
421 421 if mq1:
422 422 # avoid spurious prints if empty marquees are used
423 423 print
424 424 print mq1
425 425 print self.marquee('Use reset() if you want to rerun it.')
426 426 self.finished = True
427 427
428 428 # These methods are meant to be overridden by subclasses who may wish to
429 429 # customize the behavior of their demos.
430 430 def marquee(self,txt='',width=78,mark='*'):
431 431 """Return the input string centered in a 'marquee'."""
432 432 return marquee(txt,width,mark)
433 433
434 434 def pre_cmd(self):
435 435 """Method called before executing each block."""
436 436 pass
437 437
438 438 def post_cmd(self):
439 439 """Method called after executing each block."""
440 440 pass
441 441
442 442
443 443 class IPythonDemo(Demo):
444 444 """Class for interactive demos with IPython's input processing applied.
445 445
446 446 This subclasses Demo, but instead of executing each block by the Python
447 447 interpreter (via exec), it actually calls IPython on it, so that any input
448 448 filters which may be in place are applied to the input block.
449 449
450 450 If you have an interactive environment which exposes special input
451 451 processing, you can use this class instead to write demo scripts which
452 452 operate exactly as if you had typed them interactively. The default Demo
453 453 class requires the input to be valid, pure Python code.
454 454 """
455 455
456 456 def runlines(self,source):
457 457 """Execute a string with one or more lines of code"""
458 458
459 459 self.shell.runlines(source)
460 460
461 461 class LineDemo(Demo):
462 462 """Demo where each line is executed as a separate block.
463 463
464 464 The input script should be valid Python code.
465 465
466 466 This class doesn't require any markup at all, and it's meant for simple
467 467 scripts (with no nesting or any kind of indentation) which consist of
468 468 multiple lines of input to be executed, one at a time, as if they had been
469 469 typed in the interactive prompt."""
470 470
471 471 def reload(self):
472 472 """Reload source from disk and initialize state."""
473 473 # read data and parse into blocks
474 474 src_b = [l for l in file_readlines(self.fname) if l.strip()]
475 475 nblocks = len(src_b)
476 476 self.src = os.linesep.join(file_readlines(self.fname))
477 477 self._silent = [False]*nblocks
478 478 self._auto = [True]*nblocks
479 479 self.auto_all = True
480 480 self.nblocks = nblocks
481 481 self.src_blocks = src_b
482 482
483 483 # also build syntax-highlighted source
484 484 self.src_blocks_colored = map(self.ip_colorize,self.src_blocks)
485 485
486 486 # ensure clean namespace and seek offset
487 487 self.reset()
488 488
489 489
490 490 class IPythonLineDemo(IPythonDemo,LineDemo):
491 491 """Variant of the LineDemo class whose input is processed by IPython."""
492 492 pass
493 493
494 494
495 495 class ClearMixin(object):
496 496 """Use this mixin to make Demo classes with less visual clutter.
497 497
498 498 Demos using this mixin will clear the screen before every block and use
499 499 blank marquees.
500 500
501 501 Note that in order for the methods defined here to actually override those
502 502 of the classes it's mixed with, it must go /first/ in the inheritance
503 503 tree. For example:
504 504
505 505 class ClearIPDemo(ClearMixin,IPythonDemo): pass
506 506
507 507 will provide an IPythonDemo class with the mixin's features.
508 508 """
509 509
510 510 def marquee(self,txt='',width=78,mark='*'):
511 511 """Blank marquee that returns '' no matter what the input."""
512 512 return ''
513 513
514 514 def pre_cmd(self):
515 515 """Method called before executing each block.
516 516
517 517 This one simply clears the screen."""
518 518 os.system('clear')
519 519
520 520
521 521 class ClearDemo(ClearMixin,Demo):
522 522 pass
523 523
524 524
525 525 class ClearIPDemo(ClearMixin,IPythonDemo):
526 526 pass
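
A short usage sketch tying these classes back to the module docstring (the file
name 'ex_demo.py' mirrors the docstring's example and is illustrative):

    from IPython.demo import ClearDemo

    d = ClearDemo('ex_demo.py')   # clears the screen before each block
    d()                           # run the first block
    d()                           # run the next block
    d.reset()                     # rewind to replay the demo from the start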
@@ -1,168 +1,179
1 1 # Adapted from killableprocess.py.
2 2 #______________________________________________________________________________
3 3 #
4 4 # killableprocess - subprocesses which can be reliably killed
5 5 #
6 6 # Parts of this module are copied from the subprocess.py file contained
7 7 # in the Python distribution.
8 8 #
9 9 # Copyright (c) 2003-2004 by Peter Astrand <astrand@lysator.liu.se>
10 10 #
11 11 # Additions and modifications written by Benjamin Smedberg
12 12 # <benjamin@smedbergs.us> are Copyright (c) 2006 by the Mozilla Foundation
13 13 # <http://www.mozilla.org/>
14 14 #
15 15 # By obtaining, using, and/or copying this software and/or its
16 16 # associated documentation, you agree that you have read, understood,
17 17 # and will comply with the following terms and conditions:
18 18 #
19 19 # Permission to use, copy, modify, and distribute this software and
20 20 # its associated documentation for any purpose and without fee is
21 21 # hereby granted, provided that the above copyright notice appears in
22 22 # all copies, and that both that copyright notice and this permission
23 23 # notice appear in supporting documentation, and that the name of the
24 24 # author not be used in advertising or publicity pertaining to
25 25 # distribution of the software without specific, written prior
26 26 # permission.
27 27 #
28 28 # THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
29 29 # INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
30 30 # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
31 31 # CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
32 32 # OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
33 33 # NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
34 34 # WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
35 35
36 36 r"""killableprocess - Subprocesses which can be reliably killed
37 37
38 38 This module provides a Popen subclass of the builtin "subprocess" module. It allows
39 39 processes that launch subprocesses to be reliably killed on Windows (via the Popen.kill() method).
40 40
41 41 It also adds a timeout argument to wait(), giving a process a limited period
42 42 of time to exit before it is forcefully killed.
43 43
44 44 Note: On Windows, this module requires Windows 2000 or higher (no support for
45 45 Windows 95, 98, or NT 4.0). It also requires ctypes, which is bundled with
46 46 Python 2.5+ or available from http://python.net/crew/theller/ctypes/
47 47 """
48 48
49 49 import subprocess
50 50 from subprocess import PIPE
51 51 import sys
52 52 import os
53 import time
54 53 import types
55 54
56 55 try:
57 56 from subprocess import CalledProcessError
58 57 except ImportError:
59 58 # Python 2.4 doesn't implement CalledProcessError
60 59 class CalledProcessError(Exception):
61 60 """This exception is raised when a process run by check_call() returns
62 61 a non-zero exit status. The exit status will be stored in the
63 62 returncode attribute."""
64 63 def __init__(self, returncode, cmd):
65 64 self.returncode = returncode
66 65 self.cmd = cmd
67 66 def __str__(self):
68 67 return "Command '%s' returned non-zero exit status %d" % (self.cmd, self.returncode)
69 68
70 69 mswindows = (sys.platform == "win32")
71 70
71 skip = False
72
72 73 if mswindows:
73 import winprocess
74 import platform
75 if platform.uname()[3] == '' or platform.uname()[3] > '6.0.6000':
76 # Killable process does not work under Vista when starting
77 # something other than cmd.
78 skip = True
79 else:
80 import winprocess
74 81 else:
75 82 import signal
76 83
77 84 if not mswindows:
78 85 def DoNothing(*args):
79 86 pass
80 87
81 class Popen(subprocess.Popen):
88
89 if skip:
90 Popen = subprocess.Popen
91 else:
92 class Popen(subprocess.Popen):
82 93 if not mswindows:
83 94 # Override __init__ to set a preexec_fn
84 95 def __init__(self, *args, **kwargs):
85 96 if len(args) >= 7:
86 97 raise Exception("Arguments preexec_fn and after must be passed by keyword.")
87 98
88 99 real_preexec_fn = kwargs.pop("preexec_fn", None)
89 100 def setpgid_preexec_fn():
90 101 os.setpgid(0, 0)
91 102 if real_preexec_fn:
92 103 apply(real_preexec_fn)
93 104
94 105 kwargs['preexec_fn'] = setpgid_preexec_fn
95 106
96 107 subprocess.Popen.__init__(self, *args, **kwargs)
97 108
98 109 if mswindows:
99 110 def _execute_child(self, args, executable, preexec_fn, close_fds,
100 111 cwd, env, universal_newlines, startupinfo,
101 112 creationflags, shell,
102 113 p2cread, p2cwrite,
103 114 c2pread, c2pwrite,
104 115 errread, errwrite):
105 116 if not isinstance(args, types.StringTypes):
106 117 args = subprocess.list2cmdline(args)
107 118
108 119 if startupinfo is None:
109 120 startupinfo = winprocess.STARTUPINFO()
110 121
111 122 if None not in (p2cread, c2pwrite, errwrite):
112 123 startupinfo.dwFlags |= winprocess.STARTF_USESTDHANDLES
113 124
114 125 startupinfo.hStdInput = int(p2cread)
115 126 startupinfo.hStdOutput = int(c2pwrite)
116 127 startupinfo.hStdError = int(errwrite)
117 128 if shell:
118 129 startupinfo.dwFlags |= winprocess.STARTF_USESHOWWINDOW
119 130 startupinfo.wShowWindow = winprocess.SW_HIDE
120 131 comspec = os.environ.get("COMSPEC", "cmd.exe")
121 132 args = comspec + " /c " + args
122 133
123 134 # We create a new job for this process, so that we can kill
124 135 # the process and any sub-processes
125 136 self._job = winprocess.CreateJobObject()
126 137
127 138 creationflags |= winprocess.CREATE_SUSPENDED
128 139 creationflags |= winprocess.CREATE_UNICODE_ENVIRONMENT
129 140
130 141 hp, ht, pid, tid = winprocess.CreateProcess(
131 142 executable, args,
132 143 None, None, # No special security
133 144 1, # Must inherit handles!
134 145 creationflags,
135 146 winprocess.EnvironmentBlock(env),
136 147 cwd, startupinfo)
137 148
138 149 self._child_created = True
139 150 self._handle = hp
140 151 self._thread = ht
141 152 self.pid = pid
142 153
143 154 winprocess.AssignProcessToJobObject(self._job, hp)
144 155 winprocess.ResumeThread(ht)
145 156
146 157 if p2cread is not None:
147 158 p2cread.Close()
148 159 if c2pwrite is not None:
149 160 c2pwrite.Close()
150 161 if errwrite is not None:
151 162 errwrite.Close()
152 163
153 164 def kill(self, group=True):
154 165 """Kill the process. If group=True, all sub-processes will also be killed."""
155 166 if mswindows:
156 167 if group:
157 168 winprocess.TerminateJobObject(self._job, 127)
158 169 else:
159 170 winprocess.TerminateProcess(self._handle, 127)
160 171 self.returncode = 127
161 172 else:
162 173 if group:
163 174 os.killpg(self.pid, signal.SIGKILL)
164 175 else:
165 176 os.kill(self.pid, signal.SIGKILL)
166 177 self.returncode = -9
167 178
168 179
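
A hedged sketch of how this Popen subclass is meant to be used (the command line
is illustrative; the import matches the one used by the sibling pipedprocess
module below):

    from killableprocess import Popen, PIPE

    # Launch a shell command; on Windows it is placed in a job object so that
    # kill(group=True) can take down the whole process tree. (On Vista, where
    # the skip flag above is set, this falls back to the plain subprocess.Popen.)
    p = Popen('sleep 60', shell=True, stdout=PIPE)
    # ... later:
    p.kill(group=True)   # SIGKILL of the process group on posix, job termination on Windows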
@@ -1,74 +1,74
1 1 # encoding: utf-8
2 2 """
3 3 Object for encapsulating process execution by using callbacks for stdout,
4 4 stderr and stdin.
5 5 """
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18 from killableprocess import Popen, PIPE
19 19 from threading import Thread
20 20 from time import sleep
21 21 import os
22 22
23 23 class PipedProcess(Thread):
24 24 """ Class that encapsulates process execution by using callbacks for
25 25 stdout, stderr and stdin, and providing a reliable way of
26 26 killing it.
27 27 """
28 28
29 29 def __init__(self, command_string, out_callback,
30 30 end_callback=None,):
31 31 """ command_string: the command line executed to start the
32 32 process.
33 33
34 34 out_callback: the python callable called on stdout/stderr.
35 35
36 36 end_callback: an optional callable called when the process
37 37 finishes.
38 38
39 39 These callbacks are called from a different thread than the one
40 40 from which the process is started.
41 41 """
42 42 self.command_string = command_string
43 43 self.out_callback = out_callback
44 44 self.end_callback = end_callback
45 45 Thread.__init__(self)
46 46
47 47
48 48 def run(self):
49 49 """ Start the process and hook up the callbacks.
50 50 """
51 51 env = os.environ
52 52 env['TERM'] = 'xterm'
53 process = Popen((self.command_string + ' 2>&1', ), shell=True,
53 process = Popen(self.command_string + ' 2>&1', shell=True,
54 54 env=env,
55 55 universal_newlines=True,
56 56 stdout=PIPE, stdin=PIPE, )
57 57 self.process = process
58 58 while True:
59 59 out_char = process.stdout.read(1)
60 60 if out_char == '':
61 61 if process.poll() is not None:
62 62 # The process has finished
63 63 break
64 64 else:
65 65 # The process is not giving any interesting
66 66 # output. No use polling it immediately.
67 67 sleep(0.1)
68 68 else:
69 69 self.out_callback(out_char)
70 70
71 71 if self.end_callback is not None:
72 72 self.end_callback()
73 73
74 74
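
A small usage sketch for PipedProcess (the command and callbacks are illustrative;
the import path depends on where this module lives in the package):

    import sys
    from pipedprocess import PipedProcess   # adjust to the actual package location

    def echo(char):
        sys.stdout.write(char)              # called once per character of output

    def done():
        print 'process finished'

    p = PipedProcess('ls -l', out_callback=echo, end_callback=done)
    p.start()    # PipedProcess is a Thread; callbacks run on this worker thread
    p.join()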
@@ -1,92 +1,76
1 1 """
2 2 Base front end class for all async frontends.
3 3 """
4 4 __docformat__ = "restructuredtext en"
5 5
6 6 #-------------------------------------------------------------------------------
7 7 # Copyright (C) 2008 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-------------------------------------------------------------------------------
12 12
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 import uuid
17 from IPython.external import guid
18 18
19 try:
20 from zope.interface import Interface, Attribute, implements, classProvides
21 except ImportError, e:
22 e.message = """%s
23 ________________________________________________________________________________
24 zope.interface is required to run asynchronous frontends.""" % e.message
25 e.args = (e.message, ) + e.args[1:]
26 19
27 from frontendbase import FrontEndBase, IFrontEnd, IFrontEndFactory
28
29 from IPython.kernel.engineservice import IEngineCore
20 from zope.interface import Interface, Attribute, implements, classProvides
21 from twisted.python.failure import Failure
22 from IPython.frontend.frontendbase import FrontEndBase, IFrontEnd, IFrontEndFactory
30 23 from IPython.kernel.core.history import FrontEndHistory
31
32 try:
33 from twisted.python.failure import Failure
34 except ImportError, e:
35 e.message = """%s
36 ________________________________________________________________________________
37 twisted is required to run asynchronous frontends.""" % e.message
38 e.args = (e.message, ) + e.args[1:]
39
40
24 from IPython.kernel.engineservice import IEngineCore
41 25
42 26
43 27 class AsyncFrontEndBase(FrontEndBase):
44 28 """
45 29 Overrides FrontEndBase to wrap execute in a deferred result.
46 30 All callbacks are made as callbacks on the deferred result.
47 31 """
48 32
49 33 implements(IFrontEnd)
50 34 classProvides(IFrontEndFactory)
51 35
52 36 def __init__(self, engine=None, history=None):
53 37 assert(engine==None or IEngineCore.providedBy(engine))
54 38 self.engine = IEngineCore(engine)
55 39 if history is None:
56 40 self.history = FrontEndHistory(input_cache=[''])
57 41 else:
58 42 self.history = history
59 43
60 44
61 45 def execute(self, block, blockID=None):
62 46 """Execute the block and return the deferred result.
63 47
64 48 Parameters:
65 49 block : {str, AST}
66 50 blockID : any
67 51 Caller may provide an ID to identify this block.
68 52 result['blockID'] := blockID
69 53
70 54 Result:
71 55 Deferred result of self.engine.execute
72 56 """
73 57
74 58 if(not self.is_complete(block)):
75 59 return Failure(Exception("Block is not compilable"))
76 60
77 61 if(blockID == None):
78 blockID = uuid.uuid4() #random UUID
62 blockID = guid.generate()
79 63
80 64 d = self.engine.execute(block)
81 65 d.addCallback(self._add_history, block=block)
82 66 d.addCallbacks(self._add_block_id_for_result,
83 67 errback=self._add_block_id_for_failure,
84 68 callbackArgs=(blockID,),
85 69 errbackArgs=(blockID,))
86 70 d.addBoth(self.update_cell_prompt, blockID=blockID)
87 71 d.addCallbacks(self.render_result,
88 72 errback=self.render_error)
89 73
90 74 return d
91 75
92 76
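
A sketch of how a caller consumes the Deferred returned by execute() (the
`frontend` instance is assumed to be an AsyncFrontEndBase subclass already wired
to an engine, as in the Cocoa controller below; note that an incomplete block
makes execute() return a Failure directly rather than a Deferred):

    import sys
    from IPython.external import guid

    d = frontend.execute("a = 1 + 1", blockID=guid.generate())
    d.addCallback(lambda result: sys.stdout.write('%s\n' % result))
    d.addErrback(lambda failure: sys.stdout.write(failure.getErrorMessage() + '\n'))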
@@ -1,560 +1,560
1 1 # encoding: utf-8
2 2 # -*- test-case-name: IPython.frontend.cocoa.tests.test_cocoa_frontend -*-
3 3
4 4 """PyObjC classes to provide a Cocoa frontend to the
5 5 IPython.kernel.engineservice.IEngineBase.
6 6
7 7 To add an IPython interpreter to a cocoa app, instantiate an
8 8 IPythonCocoaController in a XIB and connect its textView outlet to an
9 9 NSTextView instance in your UI. That's it.
10 10
11 11 Author: Barry Wark
12 12 """
13 13
14 14 __docformat__ = "restructuredtext en"
15 15
16 16 #-----------------------------------------------------------------------------
17 17 # Copyright (C) 2008 The IPython Development Team
18 18 #
19 19 # Distributed under the terms of the BSD License. The full license is in
20 20 # the file COPYING, distributed as part of this software.
21 21 #-----------------------------------------------------------------------------
22 22
23 23 #-----------------------------------------------------------------------------
24 24 # Imports
25 25 #-----------------------------------------------------------------------------
26 26
27 27 import sys
28 28 import objc
29 import uuid
29 from IPython.external import guid
30 30
31 31 from Foundation import NSObject, NSMutableArray, NSMutableDictionary,\
32 32 NSLog, NSNotificationCenter, NSMakeRange,\
33 33 NSLocalizedString, NSIntersectionRange,\
34 34 NSString, NSAutoreleasePool
35 35
36 36 from AppKit import NSApplicationWillTerminateNotification, NSBeep,\
37 37 NSTextView, NSRulerView, NSVerticalRuler
38 38
39 39 from pprint import saferepr
40 40
41 41 import IPython
42 42 from IPython.kernel.engineservice import ThreadedEngineService
43 43 from IPython.frontend.asyncfrontendbase import AsyncFrontEndBase
44 44
45 45 from twisted.internet.threads import blockingCallFromThread
46 46 from twisted.python.failure import Failure
47 47
48 48 #-----------------------------------------------------------------------------
49 49 # Classes to implement the Cocoa frontend
50 50 #-----------------------------------------------------------------------------
51 51
52 52 # TODO:
53 53 # 1. use MultiEngineClient and out-of-process engine rather than
54 54 # ThreadedEngineService?
55 55 # 2. integrate Xgrid launching of engines
56 56
57 57 class AutoreleasePoolWrappedThreadedEngineService(ThreadedEngineService):
58 58 """Wrap all blocks in an NSAutoreleasePool"""
59 59
60 60 def wrapped_execute(self, msg, lines):
61 61 """wrapped_execute"""
62 62 try:
63 63 p = NSAutoreleasePool.alloc().init()
64 64 result = super(AutoreleasePoolWrappedThreadedEngineService,
65 65 self).wrapped_execute(msg, lines)
66 66 finally:
67 67 p.drain()
68 68
69 69 return result
70 70
71 71
72 72
73 73 class Cell(NSObject):
74 74 """
75 75 Representation of the prompts, input and output of a cell in the
76 76 frontend
77 77 """
78 78
79 79 blockNumber = objc.ivar().unsigned_long()
80 80 blockID = objc.ivar()
81 81 inputBlock = objc.ivar()
82 82 output = objc.ivar()
83 83
84 84
85 85
86 86 class CellBlock(object):
87 87 """
88 88 Storage for information about text ranges relating to a single cell
89 89 """
90 90
91 91
92 92 def __init__(self, inputPromptRange, inputRange=None, outputPromptRange=None,
93 93 outputRange=None):
94 94 super(CellBlock, self).__init__()
95 95 self.inputPromptRange = inputPromptRange
96 96 self.inputRange = inputRange
97 97 self.outputPromptRange = outputPromptRange
98 98 self.outputRange = outputRange
99 99
100 100 def update_ranges_for_insertion(self, text, textRange):
101 101 """Update ranges for text insertion at textRange"""
102 102
103 103 for r in [self.inputPromptRange,self.inputRange,
104 104 self.outputPromptRange, self.outputRange]:
105 105 if(r == None):
106 106 continue
107 107 intersection = NSIntersectionRange(r,textRange)
108 108 if(intersection.length == 0): #ranges don't intersect
109 109 if r.location >= textRange.location:
110 110 r.location += len(text)
111 111 else: #ranges intersect
112 112 if(r.location > textRange.location):
113 113 offset = len(text) - intersection.length
114 114 r.length -= offset
115 115 r.location += offset
116 116 elif(r.location == textRange.location):
117 117 r.length += len(text) - intersection.length
118 118 else:
119 119 r.length -= intersection.length
120 120
121 121
122 122 def update_ranges_for_deletion(self, textRange):
123 123 """Update ranges for text deletion at textRange"""
124 124
125 125 for r in [self.inputPromptRange,self.inputRange,
126 126 self.outputPromptRange, self.outputRange]:
127 127 if(r==None):
128 128 continue
129 129 intersection = NSIntersectionRange(r, textRange)
130 130 if(intersection.length == 0): #ranges don't intersect
131 131 if r.location >= textRange.location:
132 132 r.location -= textRange.length
133 133 else: #ranges intersect
134 134 if(r.location > textRange.location):
135 135 offset = intersection.length
136 136 r.length -= offset
137 137 r.location += offset
138 138 elif(r.location == textRange.location):
139 139 r.length += intersection.length
140 140 else:
141 141 r.length -= intersection.length
142 142
143 143 def __repr__(self):
144 144 return 'CellBlock('+ str((self.inputPromptRange,
145 145 self.inputRange,
146 146 self.outputPromptRange,
147 147 self.outputRange)) + ')'
148 148
149 149
150 150
151 151
152 152 class IPythonCocoaController(NSObject, AsyncFrontEndBase):
153 153 userNS = objc.ivar() #mirror of engine.user_ns (key=>str(value))
154 154 waitingForEngine = objc.ivar().bool()
155 155 textView = objc.IBOutlet()
156 156
157 157 def init(self):
158 158 self = super(IPythonCocoaController, self).init()
159 159 AsyncFrontEndBase.__init__(self,
160 160 engine=AutoreleasePoolWrappedThreadedEngineService())
161 161 if(self != None):
162 162 self._common_init()
163 163
164 164 return self
165 165
166 166 def _common_init(self):
167 167 """_common_init"""
168 168
169 169 self.userNS = NSMutableDictionary.dictionary()
170 170 self.waitingForEngine = False
171 171
172 172 self.lines = {}
173 173 self.tabSpaces = 4
174 174 self.tabUsesSpaces = True
175 175 self.currentBlockID = self.next_block_ID()
176 176 self.blockRanges = {} # blockID=>CellBlock
177 177
178 178
179 179 def awakeFromNib(self):
180 180 """awakeFromNib"""
181 181
182 182 self._common_init()
183 183
184 184 # Start the IPython engine
185 185 self.engine.startService()
186 186 NSLog('IPython engine started')
187 187
188 188 # Register for app termination
189 189 nc = NSNotificationCenter.defaultCenter()
190 190 nc.addObserver_selector_name_object_(
191 191 self,
192 192 'appWillTerminate:',
193 193 NSApplicationWillTerminateNotification,
194 194 None)
195 195
196 196 self.textView.setDelegate_(self)
197 197 self.textView.enclosingScrollView().setHasVerticalRuler_(True)
198 198 r = NSRulerView.alloc().initWithScrollView_orientation_(
199 199 self.textView.enclosingScrollView(),
200 200 NSVerticalRuler)
201 201 self.verticalRulerView = r
202 202 self.verticalRulerView.setClientView_(self.textView)
203 203 self._start_cli_banner()
204 204 self.start_new_block()
205 205
206 206
207 207 def appWillTerminate_(self, notification):
208 208 """appWillTerminate"""
209 209
210 210 self.engine.stopService()
211 211
212 212
213 213 def complete(self, token):
214 214 """Complete token in engine's user_ns
215 215
216 216 Parameters
217 217 ----------
218 218 token : string
219 219
220 220 Result
221 221 ------
222 222 Deferred result of
223 223 IPython.kernel.engineservice.IEngineBase.complete
224 224 """
225 225
226 226 return self.engine.complete(token)
227 227
228 228
229 229 def execute(self, block, blockID=None):
230 230 self.waitingForEngine = True
231 231 self.willChangeValueForKey_('commandHistory')
232 232 d = super(IPythonCocoaController, self).execute(block,
233 233 blockID)
234 234 d.addBoth(self._engine_done)
235 235 d.addCallback(self._update_user_ns)
236 236
237 237 return d
238 238
239 239
240 240 def push_(self, namespace):
241 241 """Push dictionary of key=>values to python namespace"""
242 242
243 243 self.waitingForEngine = True
244 244 self.willChangeValueForKey_('commandHistory')
245 245 d = self.engine.push(namespace)
246 246 d.addBoth(self._engine_done)
247 247 d.addCallback(self._update_user_ns)
248 248
249 249
250 250 def pull_(self, keys):
251 251 """Pull keys from python namespace"""
252 252
253 253 self.waitingForEngine = True
254 254 result = blockingCallFromThread(self.engine.pull, keys)
255 255 self.waitingForEngine = False
256 256
257 257 @objc.signature('v@:@I')
258 258 def executeFileAtPath_encoding_(self, path, encoding):
259 259 """Execute file at path in an empty namespace. Update the engine
260 260 user_ns with the resulting locals."""
261 261
262 262 lines,err = NSString.stringWithContentsOfFile_encoding_error_(
263 263 path,
264 264 encoding,
265 265 None)
266 266 self.engine.execute(lines)
267 267
268 268
269 269 def _engine_done(self, x):
270 270 self.waitingForEngine = False
271 271 self.didChangeValueForKey_('commandHistory')
272 272 return x
273 273
274 274 def _update_user_ns(self, result):
275 275 """Update self.userNS from self.engine's namespace"""
276 276 d = self.engine.keys()
277 277 d.addCallback(self._get_engine_namespace_values_for_keys)
278 278
279 279 return result
280 280
281 281
282 282 def _get_engine_namespace_values_for_keys(self, keys):
283 283 d = self.engine.pull(keys)
284 284 d.addCallback(self._store_engine_namespace_values, keys=keys)
285 285
286 286
287 287 def _store_engine_namespace_values(self, values, keys=[]):
288 288 assert(len(values) == len(keys))
289 289 self.willChangeValueForKey_('userNS')
290 290 for (k,v) in zip(keys,values):
291 291 self.userNS[k] = saferepr(v)
292 292 self.didChangeValueForKey_('userNS')
293 293
294 294
295 295 def update_cell_prompt(self, result, blockID=None):
296 296 print self.blockRanges
297 297 if(isinstance(result, Failure)):
298 298 prompt = self.input_prompt()
299 299
300 300 else:
301 301 prompt = self.input_prompt(number=result['number'])
302 302
303 303 r = self.blockRanges[blockID].inputPromptRange
304 304 self.insert_text(prompt,
305 305 textRange=r,
306 306 scrollToVisible=False
307 307 )
308 308
309 309 return result
310 310
311 311
312 312 def render_result(self, result):
313 313 blockID = result['blockID']
314 314 inputRange = self.blockRanges[blockID].inputRange
315 315 del self.blockRanges[blockID]
316 316
317 317 #print inputRange,self.current_block_range()
318 318 self.insert_text('\n' +
319 319 self.output_prompt(number=result['number']) +
320 320 result.get('display',{}).get('pprint','') +
321 321 '\n\n',
322 322 textRange=NSMakeRange(inputRange.location+inputRange.length,
323 323 0))
324 324 return result
325 325
326 326
327 327 def render_error(self, failure):
328 328 print failure
329 329 blockID = failure.blockID
330 330 inputRange = self.blockRanges[blockID].inputRange
331 331 self.insert_text('\n' +
332 332 self.output_prompt() +
333 333 '\n' +
334 334 failure.getErrorMessage() +
335 335 '\n\n',
336 336 textRange=NSMakeRange(inputRange.location +
337 337 inputRange.length,
338 338 0))
339 339 self.start_new_block()
340 340 return failure
341 341
342 342
343 343 def _start_cli_banner(self):
344 344 """Print banner"""
345 345
346 346 banner = """IPython1 %s -- An enhanced Interactive Python.""" % \
347 347 IPython.__version__
348 348
349 349 self.insert_text(banner + '\n\n')
350 350
351 351
352 352 def start_new_block(self):
353 353 """"""
354 354
355 355 self.currentBlockID = self.next_block_ID()
356 356 self.blockRanges[self.currentBlockID] = self.new_cell_block()
357 357 self.insert_text(self.input_prompt(),
358 358 textRange=self.current_block_range().inputPromptRange)
359 359
360 360
361 361
362 362 def next_block_ID(self):
363 363
364 return uuid.uuid4()
364 return guid.generate()
365 365
366 366 def new_cell_block(self):
367 367 """A new CellBlock at the end of self.textView.textStorage()"""
368 368
369 369 return CellBlock(NSMakeRange(self.textView.textStorage().length(),
370 370 0), #len(self.input_prompt())),
371 371 NSMakeRange(self.textView.textStorage().length(),# + len(self.input_prompt()),
372 372 0))
373 373
374 374
375 375 def current_block_range(self):
376 376 return self.blockRanges.get(self.currentBlockID,
377 377 self.new_cell_block())
378 378
379 379 def current_block(self):
380 380 """The current block's text"""
381 381
382 382 return self.text_for_range(self.current_block_range().inputRange)
383 383
384 384 def text_for_range(self, textRange):
385 385 """text_for_range"""
386 386
387 387 ts = self.textView.textStorage()
388 388 return ts.string().substringWithRange_(textRange)
389 389
390 390 def current_line(self):
391 391 block = self.text_for_range(self.current_block_range().inputRange)
392 392 block = block.split('\n')
393 393 return block[-1]
394 394
395 395
396 396 def insert_text(self, string=None, textRange=None, scrollToVisible=True):
397 397 """Insert text into textView at textRange, updating blockRanges
398 398 as necessary
399 399 """
400 400 if(textRange == None):
401 401 #range for end of text
402 402 textRange = NSMakeRange(self.textView.textStorage().length(), 0)
403 403
404 404
405 405 self.textView.replaceCharactersInRange_withString_(
406 406 textRange, string)
407 407
408 408 for r in self.blockRanges.itervalues():
409 409 r.update_ranges_for_insertion(string, textRange)
410 410
411 411 self.textView.setSelectedRange_(textRange)
412 412 if(scrollToVisible):
413 413 self.textView.scrollRangeToVisible_(textRange)
414 414
415 415
416 416
417 417 def replace_current_block_with_string(self, textView, string):
418 418 textView.replaceCharactersInRange_withString_(
419 419 self.current_block_range().inputRange,
420 420 string)
421 421 self.current_block_range().inputRange.length = len(string)
422 422 r = NSMakeRange(textView.textStorage().length(), 0)
423 423 textView.scrollRangeToVisible_(r)
424 424 textView.setSelectedRange_(r)
425 425
426 426
427 427 def current_indent_string(self):
428 428 """Returns the indent string, or None if there is no indent"""
429 429
430 430 return self._indent_for_block(self.current_block())
431 431
432 432
433 433 def _indent_for_block(self, block):
434 434 lines = block.split('\n')
435 435 if(len(lines) > 1):
436 436 currentIndent = len(lines[-1]) - len(lines[-1].lstrip())
437 437 if(currentIndent == 0):
438 438 currentIndent = self.tabSpaces
439 439
440 440 if(self.tabUsesSpaces):
441 441 result = ' ' * currentIndent
442 442 else:
443 443 result = '\t' * (currentIndent/self.tabSpaces)
444 444 else:
445 445 result = None
446 446
447 447 return result
448 448
449 449
450 450 # NSTextView delegate methods...
451 451 def textView_doCommandBySelector_(self, textView, selector):
452 452 assert(textView == self.textView)
453 453 NSLog("textView_doCommandBySelector_: "+selector)
454 454
455 455
456 456 if(selector == 'insertNewline:'):
457 457 indent = self.current_indent_string()
458 458 if(indent):
459 459 line = indent + self.current_line()
460 460 else:
461 461 line = self.current_line()
462 462
463 463 if(self.is_complete(self.current_block())):
464 464 self.execute(self.current_block(),
465 465 blockID=self.currentBlockID)
466 466 self.start_new_block()
467 467
468 468 return True
469 469
470 470 return False
471 471
472 472 elif(selector == 'moveUp:'):
473 473 prevBlock = self.get_history_previous(self.current_block())
474 474 if(prevBlock != None):
475 475 self.replace_current_block_with_string(textView, prevBlock)
476 476 else:
477 477 NSBeep()
478 478 return True
479 479
480 480 elif(selector == 'moveDown:'):
481 481 nextBlock = self.get_history_next()
482 482 if(nextBlock != None):
483 483 self.replace_current_block_with_string(textView, nextBlock)
484 484 else:
485 485 NSBeep()
486 486 return True
487 487
488 488 elif(selector == 'moveToBeginningOfParagraph:'):
489 489 textView.setSelectedRange_(NSMakeRange(
490 490 self.current_block_range().inputRange.location,
491 491 0))
492 492 return True
493 493 elif(selector == 'moveToEndOfParagraph:'):
494 494 textView.setSelectedRange_(NSMakeRange(
495 495 self.current_block_range().inputRange.location + \
496 496 self.current_block_range().inputRange.length, 0))
497 497 return True
498 498 elif(selector == 'deleteToEndOfParagraph:'):
499 499 if(textView.selectedRange().location <= \
500 500 self.current_block_range().location):
501 501 raise NotImplementedError()
502 502
503 503 return False # don't actually handle the delete
504 504
505 505 elif(selector == 'insertTab:'):
506 506 if(len(self.current_line().strip()) == 0): #only white space
507 507 return False
508 508 else:
509 509 self.textView.complete_(self)
510 510 return True
511 511
512 512 elif(selector == 'deleteBackward:'):
513 513 #if we're at the beginning of the current block, ignore
514 514 if(textView.selectedRange().location == \
515 515 self.current_block_range().inputRange.location):
516 516 return True
517 517 else:
518 518 for r in self.blockRanges.itervalues():
519 519 deleteRange = textView.selectedRange()
520 520 if(deleteRange.length == 0):
521 521 deleteRange.location -= 1
522 522 deleteRange.length = 1
523 523 r.update_ranges_for_deletion(deleteRange)
524 524 return False
525 525 return False
526 526
527 527
528 528 def textView_shouldChangeTextInRanges_replacementStrings_(self,
529 529 textView, ranges, replacementStrings):
530 530 """
531 531 Delegate method for NSTextView.
532 532
533 533 Refuse to change text in ranges that are not at the end, but make
534 534 those changes at the end instead.
535 535 """
536 536
537 537 assert(len(ranges) == len(replacementStrings))
538 538 allow = True
539 539 for r,s in zip(ranges, replacementStrings):
540 540 r = r.rangeValue()
541 541 if(textView.textStorage().length() > 0 and
542 542 r.location < self.current_block_range().inputRange.location):
543 543 self.insert_text(s)
544 544 allow = False
545 545
546 546 return allow
547 547
548 548 def textView_completions_forPartialWordRange_indexOfSelectedItem_(self,
549 549 textView, words, charRange, index):
550 550 try:
551 551 ts = textView.textStorage()
552 552 token = ts.string().substringWithRange_(charRange)
553 553 completions = blockingCallFromThread(self.complete, token)
554 554 except:
555 555 completions = objc.nil
556 556 NSBeep()
557 557
558 558 return (completions,0)
559 559
560 560
@@ -1,91 +1,94
1 1 # encoding: utf-8
2 2 """This file contains unittests for the
3 3 IPython.frontend.cocoa.cocoa_frontend module.
4 4 """
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #---------------------------------------------------------------------------
8 8 # Copyright (C) 2005 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #---------------------------------------------------------------------------
13 13
14 14 #---------------------------------------------------------------------------
15 15 # Imports
16 16 #---------------------------------------------------------------------------
17 from IPython.kernel.core.interpreter import Interpreter
18 import IPython.kernel.engineservice as es
19 from IPython.testing.util import DeferredTestCase
20 from twisted.internet.defer import succeed
21 from IPython.frontend.cocoa.cocoa_frontend import IPythonCocoaController
22
23 from Foundation import NSMakeRect
24 from AppKit import NSTextView, NSScrollView
17
18 try:
19 from IPython.kernel.core.interpreter import Interpreter
20 import IPython.kernel.engineservice as es
21 from IPython.testing.util import DeferredTestCase
22 from twisted.internet.defer import succeed
23 from IPython.frontend.cocoa.cocoa_frontend import IPythonCocoaController
24 from Foundation import NSMakeRect
25 from AppKit import NSTextView, NSScrollView
26 except ImportError:
27 import nose
28 raise nose.SkipTest("This test requires zope.interface, Twisted, Foolscap and PyObjC")
25 29
26 30 class TestIPythonCocoaControler(DeferredTestCase):
27 31 """Tests for IPythonCocoaController"""
28
32
29 33 def setUp(self):
30 34 self.controller = IPythonCocoaController.alloc().init()
31 35 self.engine = es.EngineService()
32 36 self.engine.startService()
33
34
37
35 38 def tearDown(self):
36 39 self.controller = None
37 40 self.engine.stopService()
38
41
39 42 def testControllerExecutesCode(self):
40 43 code ="""5+5"""
41 44 expected = Interpreter().execute(code)
42 45 del expected['number']
43 46 def removeNumberAndID(result):
44 47 del result['number']
45 48 del result['id']
46 49 return result
47 50 self.assertDeferredEquals(
48 51 self.controller.execute(code).addCallback(removeNumberAndID),
49 52 expected)
50
53
51 54 def testControllerMirrorsUserNSWithValuesAsStrings(self):
52 55 code = """userns1=1;userns2=2"""
53 56 def testControllerUserNS(result):
54 57 self.assertEquals(self.controller.userNS['userns1'], 1)
55 58 self.assertEquals(self.controller.userNS['userns2'], 2)
56
59
57 60 self.controller.execute(code).addCallback(testControllerUserNS)
58
59
61
62
60 63 def testControllerInstantiatesIEngine(self):
61 64 self.assert_(es.IEngineBase.providedBy(self.controller.engine))
62
65
63 66 def testControllerCompletesToken(self):
64 67 code = """longNameVariable=10"""
65 68 def testCompletes(result):
66 69 self.assert_("longNameVariable" in result)
67
70
68 71 def testCompleteToken(result):
69 72 self.controller.complete("longNa").addCallback(testCompletes)
70
73
71 74 self.controller.execute(code).addCallback(testCompletes)
72
73
75
76
74 77 def testCurrentIndent(self):
75 78 """test that current_indent_string returns current indent or None.
76 79 Uses _indent_for_block for direct unit testing.
77 80 """
78
81
79 82 self.controller.tabUsesSpaces = True
80 83 self.assert_(self.controller._indent_for_block("""a=3""") == None)
81 84 self.assert_(self.controller._indent_for_block("") == None)
82 85 block = """def test():\n a=3"""
83 86 self.assert_(self.controller._indent_for_block(block) == \
84 87 ' ' * self.controller.tabSpaces)
85
88
86 89 block = """if(True):\n%sif(False):\n%spass""" % \
87 90 (' '*self.controller.tabSpaces,
88 91 2*' '*self.controller.tabSpaces)
89 92 self.assert_(self.controller._indent_for_block(block) == \
90 93 2*(' '*self.controller.tabSpaces))
91
94
@@ -1,359 +1,343
1 1 # encoding: utf-8
2 2 # -*- test-case-name: IPython.frontend.tests.test_frontendbase -*-
3 3 """
4 4 frontendbase provides an interface and base class for GUI frontends for
5 5 IPython.kernel/IPython.kernel.core.
6 6
7 7 Frontend implementations will likely want to subclass FrontEndBase.
8 8
9 9 Author: Barry Wark
10 10 """
11 11 __docformat__ = "restructuredtext en"
12 12
13 13 #-------------------------------------------------------------------------------
14 14 # Copyright (C) 2008 The IPython Development Team
15 15 #
16 16 # Distributed under the terms of the BSD License. The full license is in
17 17 # the file COPYING, distributed as part of this software.
18 18 #-------------------------------------------------------------------------------
19 19
20 20 #-------------------------------------------------------------------------------
21 21 # Imports
22 22 #-------------------------------------------------------------------------------
23 23 import string
24 import uuid
25 import _ast
24 import codeop
25 from IPython.external import guid
26 26
27 from zopeinterface import Interface, Attribute, implements, classProvides
28 27
28 from IPython.frontend.zopeinterface import (
29 Interface,
30 Attribute,
31 )
29 32 from IPython.kernel.core.history import FrontEndHistory
30 33 from IPython.kernel.core.util import Bunch
31 from IPython.kernel.engineservice import IEngineCore
32 34
33 35 ##############################################################################
34 36 # TEMPORARY!!! fake configuration, while we decide whether to use tconfig or
35 37 # not
36 38
37 39 rc = Bunch()
38 40 rc.prompt_in1 = r'In [$number]: '
39 41 rc.prompt_in2 = r'...'
40 42 rc.prompt_out = r'Out [$number]: '
41 43
42 44 ##############################################################################
43 45 # Interface definitions
44 46 ##############################################################################
45 47
46 48 class IFrontEndFactory(Interface):
47 49 """Factory interface for frontends."""
48 50
49 51 def __call__(engine=None, history=None):
50 52 """
51 53 Parameters:
52 54 interpreter : IPython.kernel.engineservice.IEngineCore
53 55 """
54 56
55 57 pass
56 58
57 59
58 60 class IFrontEnd(Interface):
59 61 """Interface for frontends. All methods return t.i.d.Deferred"""
60 62
61 63 Attribute("input_prompt_template", "string.Template instance\
62 64 substitutable with execute result.")
63 65 Attribute("output_prompt_template", "string.Template instance\
64 66 substitutable with execute result.")
65 67 Attribute("continuation_prompt_template", "string.Template instance\
66 68 substitutable with execute result.")
67 69
68 70 def update_cell_prompt(result, blockID=None):
69 71 """Subclass may override to update the input prompt for a block.
70 72
71 73 In asynchronous frontends, this method will be called as a
72 74 twisted.internet.defer.Deferred's callback/errback.
73 75 Implementations should thus return result when finished.
74 76
75 77 Result is a result dict in case of success, and a
76 78 twisted.python.util.failure.Failure in case of an error
77 79 """
78 80
79 81 pass
80 82
81 83 def render_result(result):
82 84 """Render the result of an execute call. Implementors may choose the
83 85 method of rendering.
84 86 For example, a notebook-style frontend might render a Chaco plot
85 87 inline.
86 88
87 89 Parameters:
88 90 result : dict (result of IEngineBase.execute )
89 91 blockID = result['blockID']
90 92
91 93 Result:
92 94 Output of frontend rendering
93 95 """
94 96
95 97 pass
96 98
97 99 def render_error(failure):
98 100 """Subclasses must override to render the failure.
99 101
100 102 In asynchronous frontends, this method will be called as a
101 103 twisted.internet.defer.Deferred's callback. Implementations
102 104 should thus return the result when finished.
103 105
104 106 blockID = failure.blockID
105 107 """
106 108
107 109 pass
108 110
109 111 def input_prompt(number=''):
110 112 """Returns the input prompt by substituting into
111 113 self.input_prompt_template
112 114 """
113 115 pass
114 116
115 117 def output_prompt(number=''):
116 118 """Returns the output prompt by substituting into
117 119 self.output_prompt_template
118 120 """
119 121
120 122 pass
121 123
122 124 def continuation_prompt():
123 125 """Returns the continuation prompt by substituting into
124 126 self.continuation_prompt_template
125 127 """
126 128
127 129 pass
128 130
129 131 def is_complete(block):
130 132 """Returns True if block is complete, False otherwise."""
131 133
132 134 pass
133 135
134 def compile_ast(block):
135 """Compiles block to an _ast.AST"""
136
137 pass
138
136
139 137 def get_history_previous(current_block):
140 138 """Returns the previous block in the history. Saves currentBlock if
141 139 the history_cursor is currently at the end of the input history."""
142 140 pass
143 141
144 142 def get_history_next():
145 143 """Returns the next block in the history."""
146 144
147 145 pass
148 146
149 147 def complete(self, line):
150 148 """Returns the list of possible completions, and the completed
151 149 line.
152 150
153 151 The input argument is the full line to be completed. This method
154 152 returns both the line completed as much as possible, and the list
155 153 of further possible completions (full words).
156 154 """
157 155 pass
158 156
159 157
160 158 ##############################################################################
161 159 # Base class for all the frontends.
162 160 ##############################################################################
163 161
164 162 class FrontEndBase(object):
165 163 """
166 164 FrontEndBase manages the state tasks for a CLI frontend:
167 165 - Input and output history management
168 166 - Input/continuation and output prompt generation
169 167
170 168 Some issues (due to possibly unavailable engine):
171 169 - How do we get the current cell number for the engine?
172 170 - How do we handle completions?
173 171 """
174 172
175 173 history_cursor = 0
176 174
177 175 input_prompt_template = string.Template(rc.prompt_in1)
178 176 output_prompt_template = string.Template(rc.prompt_out)
179 177 continuation_prompt_template = string.Template(rc.prompt_in2)
180 178
181 179 def __init__(self, shell=None, history=None):
182 180 self.shell = shell
183 181 if history is None:
184 182 self.history = FrontEndHistory(input_cache=[''])
185 183 else:
186 184 self.history = history
187 185
188 186
189 187 def input_prompt(self, number=''):
190 188 """Returns the current input prompt
191 189
192 190 It would be great to use ipython1.core.prompts.Prompt1 here
193 191 """
194 192 return self.input_prompt_template.safe_substitute({'number':number})
195 193
196 194
197 195 def continuation_prompt(self):
198 196 """Returns the current continuation prompt"""
199 197
200 198 return self.continuation_prompt_template.safe_substitute()
201 199
202 200 def output_prompt(self, number=''):
203 201 """Returns the output prompt for result"""
204 202
205 203 return self.output_prompt_template.safe_substitute({'number':number})
206 204
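As a small self-contained illustration of the prompt templating above (the template strings mirror the rc defaults defined near the top of this module):

    import string

    input_prompt_template = string.Template(r'In [$number]: ')
    output_prompt_template = string.Template(r'Out [$number]: ')

    # safe_substitute() never raises on a missing key, which is why the
    # prompt methods above can be called with number='' as a default.
    print(input_prompt_template.safe_substitute({'number': 3}))    # In [3]:
    print(output_prompt_template.safe_substitute({'number': 3}))   # Out [3]:
    print(input_prompt_template.safe_substitute({'number': ''}))   # In []: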
207 205
208 206 def is_complete(self, block):
209 207 """Determine if block is complete.
210 208
211 209 Parameters
212 210 block : string
213 211
214 212 Result
215 213 True if block can be sent to the engine without compile errors.
216 214 False otherwise.
217 215 """
218 216
219 217 try:
220 ast = self.compile_ast(block)
218 is_complete = codeop.compile_command(block.rstrip() + '\n\n',
219 "<string>", "exec")
221 220 except:
222 221 return False
223 222
224 223 lines = block.split('\n')
225 return (len(lines)==1 or str(lines[-1])=='')
226
227
228 def compile_ast(self, block):
229 """Compile block to an AST
230
231 Parameters:
232 block : str
233
234 Result:
235 AST
236
237 Throws:
238 Exception if block cannot be compiled
239 """
240
241 return compile(block, "<string>", "exec", _ast.PyCF_ONLY_AST)
224 return ((is_complete is not None)
225 and (len(lines)==1 or str(lines[-1])==''))
242 226
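For reference, a stand-alone sketch of the codeop behaviour this method relies on (shown here with codeop's default 'single' mode; the call above uses 'exec' and pads the block with two newlines):

    import codeop

    # A complete statement compiles to a code object.
    assert codeop.compile_command('a = 1') is not None
    # An unfinished statement returns None, i.e. "more input needed".
    assert codeop.compile_command('if True:') is None
    # Invalid input raises SyntaxError, which is_complete() above turns
    # into a plain False via the bare except.
    try:
        codeop.compile_command('1 +')
    except SyntaxError:
        pass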
243 227
244 228 def execute(self, block, blockID=None):
245 229 """Execute the block and return the result.
246 230
247 231 Parameters:
248 232 block : {str, AST}
249 233 blockID : any
250 234 Caller may provide an ID to identify this block.
251 235 result['blockID'] := blockID
252 236
253 237 Result:
254 238 Deferred result of self.interpreter.execute
255 239 """
256 240
257 241 if(not self.is_complete(block)):
258 242 raise Exception("Block is not compilable")
259 243
260 244 if(blockID == None):
261 blockID = uuid.uuid4() #random UUID
245 blockID = guid.generate()
262 246
263 247 try:
264 248 result = self.shell.execute(block)
265 249 except Exception,e:
266 250 e = self._add_block_id_for_failure(e, blockID=blockID)
267 251 e = self.update_cell_prompt(e, blockID=blockID)
268 252 e = self.render_error(e)
269 253 else:
270 254 result = self._add_block_id_for_result(result, blockID=blockID)
271 255 result = self.update_cell_prompt(result, blockID=blockID)
272 256 result = self.render_result(result)
273 257
274 258 return result
275 259
276 260
277 261 def _add_block_id_for_result(self, result, blockID):
278 262 """Add the blockID to result or failure. Unfortunately, we have to
279 263 treat failures differently than result dicts.
280 264 """
281 265
282 266 result['blockID'] = blockID
283 267
284 268 return result
285 269
286 270 def _add_block_id_for_failure(self, failure, blockID):
287 271 """_add_block_id_for_failure"""
288 272 failure.blockID = blockID
289 273 return failure
290 274
291 275
292 276 def _add_history(self, result, block=None):
293 277 """Add block to the history"""
294 278
295 279 assert(block != None)
296 280 self.history.add_items([block])
297 281 self.history_cursor += 1
298 282
299 283 return result
300 284
301 285
302 286 def get_history_previous(self, current_block):
303 287 """ Returns the previous history string and decrements the history cursor.
304 288 """
305 289 command = self.history.get_history_item(self.history_cursor - 1)
306 290
307 291 if command is not None:
308 292 if(self.history_cursor+1 == len(self.history.input_cache)):
309 293 self.history.input_cache[self.history_cursor] = current_block
310 294 self.history_cursor -= 1
311 295 return command
312 296
313 297
314 298 def get_history_next(self):
315 299 """ Returns next history string and increment history cursor.
316 300 """ Returns the next history string and increments the history cursor.
317 301 command = self.history.get_history_item(self.history_cursor+1)
318 302
319 303 if command is not None:
320 304 self.history_cursor += 1
321 305 return command
322 306
323 307 ###
324 308 # Subclasses probably want to override these methods...
325 309 ###
326 310
327 311 def update_cell_prompt(self, result, blockID=None):
328 312 """Subclass may override to update the input prompt for a block.
329 313
330 314 This method only really makes sense in asynchronous frontends.
331 315 Since this method will be called as a
332 316 twisted.internet.defer.Deferred's callback, implementations should
333 317 return result when finished.
334 318 """
335 319
336 320 raise NotImplementedError
337 321
338 322
339 323 def render_result(self, result):
340 324 """Subclasses must override to render result.
341 325
342 326 In asynchronous frontends, this method will be called as a
343 327 twisted.internet.defer.Deferred's callback. Implementations
344 328 should thus return result when finished.
345 329 """
346 330
347 331 raise NotImplementedError
348 332
349 333
350 334 def render_error(self, failure):
351 335 """Subclasses must override to render the failure.
352 336
353 337 In asynchronous frontends, this method will be called as a
354 338 twisted.internet.defer.Deferred's callback. Implementations
355 339 should thus return result when finished.
356 340 """
357 341
358 342 raise NotImplementedError
359 343
@@ -1,294 +1,333
1 1 """
2 2 Base front end class for all line-oriented frontends, rather than
3 3 block-oriented.
4 4
5 5 Currently this focuses on synchronous frontends.
6 6 """
7 7 __docformat__ = "restructuredtext en"
8 8
9 9 #-------------------------------------------------------------------------------
10 10 # Copyright (C) 2008 The IPython Development Team
11 11 #
12 12 # Distributed under the terms of the BSD License. The full license is in
13 13 # the file COPYING, distributed as part of this software.
14 14 #-------------------------------------------------------------------------------
15 15
16 16 #-------------------------------------------------------------------------------
17 17 # Imports
18 18 #-------------------------------------------------------------------------------
19 19 import re
20 20
21 21 import IPython
22 22 import sys
23 import codeop
24 import traceback
23 25
24 26 from frontendbase import FrontEndBase
25 27 from IPython.kernel.core.interpreter import Interpreter
26 28
27 29 def common_prefix(strings):
28 30 """ Given a list of strings, return the common prefix between all
29 31 these strings.
30 32 """
31 33 ref = strings[0]
32 34 prefix = ''
33 35 for size in range(len(ref)):
34 36 test_prefix = ref[:size+1]
35 37 for string in strings[1:]:
36 38 if not string.startswith(test_prefix):
37 39 return prefix
38 40 prefix = test_prefix
39 41
40 42 return prefix
41 43
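A quick usage sketch of common_prefix (assuming this module is importable as IPython.frontend.linefrontendbase, which is how the prefilter frontend below imports it):

    from IPython.frontend.linefrontendbase import common_prefix

    print(common_prefix(['isalpha', 'isdigit', 'isupper']))   # -> 'is'
    print(common_prefix(['foo']))                             # -> 'foo'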
42 44 #-------------------------------------------------------------------------------
43 45 # Base class for the line-oriented front ends
44 46 #-------------------------------------------------------------------------------
45 47 class LineFrontEndBase(FrontEndBase):
46 48 """ Concrete implementation of the FrontEndBase class. This is meant
47 49 to be the base class behind all the frontends that are line-oriented,
48 50 rather than block-oriented.
49 51 """
50 52
51 53 # We need to keep the prompt number, to be able to increment
52 54 # it when there is an exception.
53 55 prompt_number = 1
54 56
55 57 # We keep a reference to the last result: it helps testing and
56 58 # programmatic control of the frontend.
57 59 last_result = dict(number=0)
58 60
59 61 # The input buffer being edited
60 62 input_buffer = ''
61 63
62 64 # Set to true for debug output
63 65 debug = False
64 66
65 67 # A banner to print at startup
66 68 banner = None
67 69
68 70 #--------------------------------------------------------------------------
69 71 # FrontEndBase interface
70 72 #--------------------------------------------------------------------------
71 73
72 74 def __init__(self, shell=None, history=None, banner=None, *args, **kwargs):
73 75 if shell is None:
74 76 shell = Interpreter()
75 77 FrontEndBase.__init__(self, shell=shell, history=history)
76 78
77 79 if banner is not None:
78 80 self.banner = banner
81
82 def start(self):
83 """ Put the frontend in a state where it is ready for user
84 interaction.
85 """
79 86 if self.banner is not None:
80 87 self.write(self.banner, refresh=False)
81 88
82 89 self.new_prompt(self.input_prompt_template.substitute(number=1))
83 90
84 91
85 92 def complete(self, line):
86 93 """Complete line in engine's user_ns
87 94
88 95 Parameters
89 96 ----------
90 97 line : string
91 98
92 99 Result
93 100 ------
94 101 The replacement for the line and the list of possible completions.
95 102 """
96 103 completions = self.shell.complete(line)
97 104 complete_sep = re.compile('[\s\{\}\[\]\(\)\=]')
98 105 if completions:
99 106 prefix = common_prefix(completions)
100 107 residual = complete_sep.split(line)[:-1]
101 108 line = line[:-len(residual)] + prefix
102 109 return line, completions
103 110
104 111
105 112 def render_result(self, result):
106 113 """ Frontend-specific rendering of the result of a calculation
107 114 that has been sent to an engine.
108 115 """
109 116 if 'stdout' in result and result['stdout']:
110 117 self.write('\n' + result['stdout'])
111 118 if 'display' in result and result['display']:
112 119 self.write("%s%s\n" % (
113 120 self.output_prompt_template.substitute(
114 121 number=result['number']),
115 122 result['display']['pprint']
116 123 ) )
117 124
118 125
119 126 def render_error(self, failure):
120 127 """ Frontend-specific rendering of error.
121 128 """
122 129 self.write('\n\n'+str(failure)+'\n\n')
123 130 return failure
124 131
125 132
126 133 def is_complete(self, string):
127 134 """ Check if a string forms a complete, executable set of
128 135 commands.
129 136
130 137 For the line-oriented frontend, multi-line code is not executed
131 138 as soon as it is complete: the user has to enter two line
132 139 returns.
133 140 """
134 141 if string in ('', '\n'):
135 142 # Prefiltering, eg through ipython0, may return an empty
136 143 # string although some operations have been accomplished. We
137 144 # thus want to consider an empty string as a complete
138 145 # statement.
139 146 return True
140 147 elif ( len(self.input_buffer.split('\n'))>2
141 148 and not re.findall(r"\n[\t ]*\n[\t ]*$", string)):
142 149 return False
143 150 else:
144 # Add line returns here, to make sure that the statement is
145 # complete.
146 return FrontEndBase.is_complete(self, string.rstrip() + '\n\n')
151 self.capture_output()
152 try:
153 # Add line returns here, to make sure that the statement is
154 # complete.
155 is_complete = codeop.compile_command(string.rstrip() + '\n\n',
156 "<string>", "exec")
157 self.release_output()
158 except Exception, e:
159 # XXX: Hack: return True so that the
160 # code gets executed and the error captured.
161 is_complete = True
162 return is_complete
147 163
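The "two line returns" rule above reduces to the regular expression used in the elif branch; a self-contained check of that pattern:

    import re

    def ends_with_blank_line(string):
        # True once the buffer ends with an (optionally indented) blank line.
        return bool(re.findall(r"\n[\t ]*\n[\t ]*$", string))

    print(ends_with_blank_line('for i in range(2):\n    pass\n'))     # False
    print(ends_with_blank_line('for i in range(2):\n    pass\n\n'))   # True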
148 164
149 165 def write(self, string, refresh=True):
150 166 """ Write some characters to the display.
151 167
152 168 Subclasses should override this method.
153 169
154 170 The refresh keyword argument is used in frontends with an
155 171 event loop, to choose whether the write should trigger a UI
156 172 refresh, and thus be synchronous, or not.
157 173 """
158 174 print >>sys.__stderr__, string
159 175
160 176
161 177 def execute(self, python_string, raw_string=None):
162 178 """ Stores the raw_string in the history, and sends the
163 179 python string to the interpreter.
164 180 """
165 181 if raw_string is None:
166 182 raw_string = python_string
167 183 # Create a false result, in case there is an exception
168 184 self.last_result = dict(number=self.prompt_number)
185
186 ## try:
187 ## self.history.input_cache[-1] = raw_string.rstrip()
188 ## result = self.shell.execute(python_string)
189 ## self.last_result = result
190 ## self.render_result(result)
191 ## except:
192 ## self.show_traceback()
193 ## finally:
194 ## self.after_execute()
195
169 196 try:
170 self.history.input_cache[-1] = raw_string.rstrip()
171 result = self.shell.execute(python_string)
172 self.last_result = result
173 self.render_result(result)
174 except:
175 self.show_traceback()
197 try:
198 self.history.input_cache[-1] = raw_string.rstrip()
199 result = self.shell.execute(python_string)
200 self.last_result = result
201 self.render_result(result)
202 except:
203 self.show_traceback()
176 204 finally:
177 205 self.after_execute()
178 206
207
179 208 #--------------------------------------------------------------------------
180 209 # LineFrontEndBase interface
181 210 #--------------------------------------------------------------------------
182 211
183 212 def prefilter_input(self, string):
184 """ Priflter the input to turn it in valid python.
213 """ Prefilter the input to turn it into valid Python.
185 214 """
186 215 string = string.replace('\r\n', '\n')
187 216 string = string.replace('\t', 4*' ')
188 217 # Clean the trailing whitespace
189 218 string = '\n'.join(l.rstrip() for l in string.split('\n'))
190 219 return string
191 220
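A quick stand-alone check of the normalisation performed here (carriage returns dropped, tabs expanded to four spaces, trailing whitespace stripped):

    s = 'if True:\r\n\tpass   \r\n'
    s = s.replace('\r\n', '\n')
    s = s.replace('\t', 4 * ' ')
    s = '\n'.join(l.rstrip() for l in s.split('\n'))
    print(repr(s))   # 'if True:\n    pass\n'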
192 221
193 222 def after_execute(self):
194 223 """ All the operations required after an execution to put the
195 224 terminal back into a state where it is usable.
196 225 """
197 226 self.prompt_number += 1
198 227 self.new_prompt(self.input_prompt_template.substitute(
199 228 number=(self.last_result['number'] + 1)))
200 229 # Start a new empty history entry
201 230 self._add_history(None, '')
202 231 self.history_cursor = len(self.history.input_cache) - 1
203 232
204 233
205 234 def complete_current_input(self):
206 235 """ Do code completion on current line.
207 236 """
208 237 if self.debug:
209 238 print >>sys.__stdout__, "complete_current_input",
210 239 line = self.input_buffer
211 240 new_line, completions = self.complete(line)
212 241 if len(completions)>1:
213 self.write_completion(completions)
214 self.input_buffer = new_line
242 self.write_completion(completions, new_line=new_line)
243 elif not line == new_line:
244 self.input_buffer = new_line
215 245 if self.debug:
246 print >>sys.__stdout__, 'line', line
247 print >>sys.__stdout__, 'new_line', new_line
216 248 print >>sys.__stdout__, completions
217 249
218 250
219 251 def get_line_width(self):
220 252 """ Return the width of the line in characters.
221 253 """
222 254 return 80
223 255
224 256
225 def write_completion(self, possibilities):
257 def write_completion(self, possibilities, new_line=None):
226 258 """ Write the list of possible completions.
259
260 new_line is the completed input line that should be displayed
261 after the completions are written. If None, the input_buffer
262 before the completion is used.
227 263 """
228 current_buffer = self.input_buffer
264 if new_line is None:
265 new_line = self.input_buffer
229 266
230 267 self.write('\n')
231 268 max_len = len(max(possibilities, key=len)) + 1
232 269
233 270 # Now we check how many symbols we can put on a line...
234 271 chars_per_line = self.get_line_width()
235 272 symbols_per_line = max(1, chars_per_line/max_len)
236 273
237 274 pos = 1
238 275 buf = []
239 276 for symbol in possibilities:
240 277 if pos < symbols_per_line:
241 278 buf.append(symbol.ljust(max_len))
242 279 pos += 1
243 280 else:
244 281 buf.append(symbol.rstrip() + '\n')
245 282 pos = 1
246 283 self.write(''.join(buf))
247 284 self.new_prompt(self.input_prompt_template.substitute(
248 285 number=self.last_result['number'] + 1))
249 self.input_buffer = current_buffer
286 self.input_buffer = new_line
250 287
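A self-contained sketch of the column layout performed above: every symbol is padded to the longest width, and a newline is emitted once a line is full (the width of 30 below is arbitrary; get_line_width() above returns 80):

    possibilities = ['isalpha', 'isdigit', 'isupper', 'istitle', 'isspace']
    chars_per_line = 30
    max_len = len(max(possibilities, key=len)) + 1        # 8
    symbols_per_line = max(1, chars_per_line // max_len)  # 3 per line

    pos = 1
    buf = []
    for symbol in possibilities:
        if pos < symbols_per_line:
            buf.append(symbol.ljust(max_len))
            pos += 1
        else:
            buf.append(symbol.rstrip() + '\n')
            pos = 1
    print(''.join(buf))
    # isalpha isdigit isupper
    # istitle isspace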
251 288
252 289 def new_prompt(self, prompt):
253 290 """ Prints a prompt and starts a new editing buffer.
254 291
255 292 Subclasses should use this method to make sure that the
256 293 terminal is put in a state favorable for a new line
257 294 input.
258 295 """
259 296 self.input_buffer = ''
260 297 self.write(prompt)
261 298
262 299
263 300 #--------------------------------------------------------------------------
264 301 # Private API
265 302 #--------------------------------------------------------------------------
266 303
267 304 def _on_enter(self):
268 305 """ Called when the return key is pressed in a line editing
269 306 buffer.
270 307 """
271 308 current_buffer = self.input_buffer
272 309 cleaned_buffer = self.prefilter_input(current_buffer)
273 310 if self.is_complete(cleaned_buffer):
274 311 self.execute(cleaned_buffer, raw_string=current_buffer)
275 312 else:
276 313 self.input_buffer += self._get_indent_string(
277 314 current_buffer[:-1])
315 if len(current_buffer.split('\n')) == 2:
316 self.input_buffer += '\t\t'
278 317 if current_buffer[:-1].split('\n')[-1].rstrip().endswith(':'):
279 318 self.input_buffer += '\t'
280 319
281 320
282 321 def _get_indent_string(self, string):
283 322 """ Return the string of whitespace that prefixes a line. Used to
284 323 add the right amount of indentation when creating a new line.
285 324 """
286 325 string = string.replace('\t', ' '*4)
287 326 string = string.split('\n')[-1]
288 327 indent_chars = len(string) - len(string.lstrip())
289 328 indent_string = '\t'*(indent_chars // 4) + \
290 329 ' '*(indent_chars % 4)
291 330
292 331 return indent_string
293 332
294 333
@@ -1,223 +1,246
1 1 """
2 2 Frontend class that uses IPython0 to prefilter the inputs.
3 3
4 4 Using the IPython0 mechanism gives us access to the magics.
5 5
6 6 This is a transitory class, used here to do the transition between
7 7 ipython0 and ipython1. This class is meant to be short-lived as more
8 8 functionality is abstracted out of ipython0 into reusable functions and
9 9 added to the interpreter. This class can be used to guide this
10 10 refactoring.
11 11 """
12 12 __docformat__ = "restructuredtext en"
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Copyright (C) 2008 The IPython Development Team
16 16 #
17 17 # Distributed under the terms of the BSD License. The full license is in
18 18 # the file COPYING, distributed as part of this software.
19 19 #-------------------------------------------------------------------------------
20 20
21 21 #-------------------------------------------------------------------------------
22 22 # Imports
23 23 #-------------------------------------------------------------------------------
24 24 import sys
25 25
26 26 from linefrontendbase import LineFrontEndBase, common_prefix
27 from frontendbase import FrontEndBase
27 28
28 29 from IPython.ipmaker import make_IPython
29 30 from IPython.ipapi import IPApi
30 31 from IPython.kernel.core.redirector_output_trap import RedirectorOutputTrap
31 32
32 33 from IPython.kernel.core.sync_traceback_trap import SyncTracebackTrap
33 34
34 35 from IPython.genutils import Term
35 36 import pydoc
36 37 import os
38 import sys
37 39
38 40
39 41 def mk_system_call(system_call_function, command):
40 42 """ Given an os.system replacement and a leading command string,
41 43 returns a function that will execute the command with the given
42 44 argument string.
43 45 """
44 46 def my_system_call(args):
45 47 system_call_function("%s %s" % (command, args))
46 48 return my_system_call
47 49
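To make the closure concrete, a stand-alone copy of mk_system_call exercised with a simple recorder instead of a real os.system replacement:

    def mk_system_call(system_call_function, command):
        def my_system_call(args):
            system_call_function("%s %s" % (command, args))
        return my_system_call

    calls = []
    ls = mk_system_call(calls.append, 'ls -CF')
    ls('~/code')
    print(calls)   # ['ls -CF ~/code']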
48 50 #-------------------------------------------------------------------------------
49 51 # Frontend class using ipython0 to do the prefiltering.
50 52 #-------------------------------------------------------------------------------
51 53 class PrefilterFrontEnd(LineFrontEndBase):
52 54 """ Class that uses ipython0 to do prefilter the input, do the
53 55 """ Class that uses ipython0 to prefilter the input, do the
54 56
55 57 The core trick is to use an ipython0 instance to prefilter the
56 58 input, and share the namespace between the interpreter instance used
57 59 to execute the statements and the ipython0 used for code
58 60 completion...
59 61 """
62
63 debug = False
60 64
61 65 def __init__(self, ipython0=None, *args, **kwargs):
62 66 """ Parameters:
63 67 -----------
64 68
65 69 ipython0: an optional ipython0 instance to use for command
66 70 prefiltering and completion.
67 71 """
72 LineFrontEndBase.__init__(self, *args, **kwargs)
73 self.shell.output_trap = RedirectorOutputTrap(
74 out_callback=self.write,
75 err_callback=self.write,
76 )
77 self.shell.traceback_trap = SyncTracebackTrap(
78 formatters=self.shell.traceback_trap.formatters,
79 )
80
81 # Start the ipython0 instance:
68 82 self.save_output_hooks()
69 83 if ipython0 is None:
70 84 # Instantiate an IPython0 interpreter to be able to use the
71 85 # prefiltering.
72 86 # XXX: argv=[] is a bit bold.
73 ipython0 = make_IPython(argv=[])
87 ipython0 = make_IPython(argv=[],
88 user_ns=self.shell.user_ns,
89 user_global_ns=self.shell.user_global_ns)
74 90 self.ipython0 = ipython0
75 91 # Set the pager:
76 92 self.ipython0.set_hook('show_in_pager',
77 93 lambda s, string: self.write("\n" + string))
78 94 self.ipython0.write = self.write
79 95 self._ip = _ip = IPApi(self.ipython0)
80 96 # Make sure the raw system call doesn't get called, as we don't
81 97 # have a stdin accessible.
82 98 self._ip.system = self.system_call
83 99 # XXX: Muck around with magics so that they work better
84 100 # in our environment
85 101 self.ipython0.magic_ls = mk_system_call(self.system_call,
86 102 'ls -CF')
87 103 # And now clean up the mess created by ipython0
88 104 self.release_output()
105
106
89 107 if not 'banner' in kwargs and self.banner is None:
90 kwargs['banner'] = self.ipython0.BANNER + """
108 self.banner = self.ipython0.BANNER + """
91 109 This is the wx frontend, by Gael Varoquaux. This is EXPERIMENTAL code."""
92 110
93 LineFrontEndBase.__init__(self, *args, **kwargs)
94 # XXX: Hack: mix the two namespaces
95 self.shell.user_ns.update(self.ipython0.user_ns)
96 self.ipython0.user_ns = self.shell.user_ns
97 self.shell.user_global_ns.update(self.ipython0.user_global_ns)
98 self.ipython0.user_global_ns = self.shell.user_global_ns
99
100 self.shell.output_trap = RedirectorOutputTrap(
101 out_callback=self.write,
102 err_callback=self.write,
103 )
104 self.shell.traceback_trap = SyncTracebackTrap(
105 formatters=self.shell.traceback_trap.formatters,
106 )
111 self.start()
107 112
108 113 #--------------------------------------------------------------------------
109 114 # FrontEndBase interface
110 115 #--------------------------------------------------------------------------
111 116
112 117 def show_traceback(self):
113 118 """ Use ipython0 to capture the last traceback and display it.
114 119 """
115 120 self.capture_output()
116 self.ipython0.showtraceback()
121 self.ipython0.showtraceback(tb_offset=-1)
117 122 self.release_output()
118 123
119 124
120 125 def execute(self, python_string, raw_string=None):
121 126 if self.debug:
122 127 print 'Executing Python code:', repr(python_string)
123 128 self.capture_output()
124 129 LineFrontEndBase.execute(self, python_string,
125 130 raw_string=raw_string)
126 131 self.release_output()
127 132
128 133
129 134 def save_output_hooks(self):
130 135 """ Store all the output hooks we can think of, to be able to
131 136 restore them.
132 137
133 138 We need to do this early, as starting the ipython0 instance will
134 139 screw up the output hooks.
135 140 """
136 141 self.__old_cout_write = Term.cout.write
137 142 self.__old_cerr_write = Term.cerr.write
138 143 self.__old_stdout = sys.stdout
139 144 self.__old_stderr= sys.stderr
140 145 self.__old_help_output = pydoc.help.output
141 146 self.__old_display_hook = sys.displayhook
142 147
143 148
144 149 def capture_output(self):
145 150 """ Capture all the output mechanisms we can think of.
146 151 """
147 152 self.save_output_hooks()
148 153 Term.cout.write = self.write
149 154 Term.cerr.write = self.write
150 155 sys.stdout = Term.cout
151 156 sys.stderr = Term.cerr
152 157 pydoc.help.output = self.shell.output_trap.out
153 158
154 159
155 160 def release_output(self):
156 161 """ Release all the different captures we have made.
157 162 """
158 163 Term.cout.write = self.__old_cout_write
159 164 Term.cerr.write = self.__old_cerr_write
160 165 sys.stdout = self.__old_stdout
161 166 sys.stderr = self.__old_stderr
162 167 pydoc.help.output = self.__old_help_output
163 168 sys.displayhook = self.__old_display_hook
164 169
165 170
166 171 def complete(self, line):
172 # FIXME: This should be factored out in the linefrontendbase
173 # method.
167 174 word = line.split('\n')[-1].split(' ')[-1]
168 175 completions = self.ipython0.complete(word)
169 176 # FIXME: The proper sort should be done in the complete method.
170 177 key = lambda x: x.replace('_', '')
171 178 completions.sort(key=key)
172 179 if completions:
173 180 prefix = common_prefix(completions)
174 181 line = line[:-len(word)] + prefix
175 182 return line, completions
176 183
177 184
178 185 #--------------------------------------------------------------------------
179 186 # LineFrontEndBase interface
180 187 #--------------------------------------------------------------------------
181 188
182 189 def prefilter_input(self, input_string):
183 190 """ Using IPython0 to prefilter the commands to turn them
184 191 into executable statements that are valid Python strings.
185 192 """
186 193 input_string = LineFrontEndBase.prefilter_input(self, input_string)
187 194 filtered_lines = []
188 195 # The IPython0 prefilters sometimes produce output. We need to
189 196 # capture it.
190 197 self.capture_output()
191 198 self.last_result = dict(number=self.prompt_number)
199
200 ## try:
201 ## for line in input_string.split('\n'):
202 ## filtered_lines.append(
203 ## self.ipython0.prefilter(line, False).rstrip())
204 ## except:
205 ## # XXX: probably not the right thing to do.
206 ## self.ipython0.showsyntaxerror()
207 ## self.after_execute()
208 ## finally:
209 ## self.release_output()
210
211
192 212 try:
193 for line in input_string.split('\n'):
194 filtered_lines.append(
195 self.ipython0.prefilter(line, False).rstrip())
196 except:
197 # XXX: probably not the right thing to do.
198 self.ipython0.showsyntaxerror()
199 self.after_execute()
213 try:
214 for line in input_string.split('\n'):
215 filtered_lines.append(
216 self.ipython0.prefilter(line, False).rstrip())
217 except:
218 # XXX: probably not the right thing to do.
219 self.ipython0.showsyntaxerror()
220 self.after_execute()
200 221 finally:
201 222 self.release_output()
202 223
224
225
203 226 # Clean up the trailing whitespace, to avoid indentation errors
204 227 filtered_string = '\n'.join(filtered_lines)
205 228 return filtered_string
206 229
207 230
208 231 #--------------------------------------------------------------------------
209 232 # PrefilterFrontEnd interface
210 233 #--------------------------------------------------------------------------
211 234
212 235 def system_call(self, command_string):
213 236 """ Allows the frontend to define its own system call, to be
214 237 able to capture output and redirect input.
215 238 """
216 239 return os.system(command_string)
217 240
218 241
219 242 def do_exit(self):
220 243 """ Exit the shell, cleanup and save the history.
221 244 """
222 245 self.ipython0.atexit_operations()
223 246
@@ -1,152 +1,32
1 1 # encoding: utf-8
2
3 """This file contains unittests for the frontendbase module."""
2 """
3 Test the basic functionality of frontendbase.
4 """
4 5
5 6 __docformat__ = "restructuredtext en"
6 7
7 #---------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #---------------------------------------------------------------------------
13
14 #---------------------------------------------------------------------------
15 # Imports
16 #---------------------------------------------------------------------------
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
10 #
11 # Distributed under the terms of the BSD License. The full license is
12 # in the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
17 14
18 import unittest
19 from IPython.frontend.asyncfrontendbase import AsyncFrontEndBase
20 from IPython.frontend import frontendbase
21 from IPython.kernel.engineservice import EngineService
15 from IPython.frontend.frontendbase import FrontEndBase
22 16
23 class FrontEndCallbackChecker(AsyncFrontEndBase):
24 """FrontEndBase subclass for checking callbacks"""
25 def __init__(self, engine=None, history=None):
26 super(FrontEndCallbackChecker, self).__init__(engine=engine,
27 history=history)
28 self.updateCalled = False
29 self.renderResultCalled = False
30 self.renderErrorCalled = False
31
32 def update_cell_prompt(self, result, blockID=None):
33 self.updateCalled = True
34 return result
35
36 def render_result(self, result):
37 self.renderResultCalled = True
38 return result
39
40
41 def render_error(self, failure):
42 self.renderErrorCalled = True
43 return failure
44
17 def test_iscomplete():
18 """ Check that is_complete works.
19 """
20 f = FrontEndBase()
21 assert f.is_complete('(a + a)')
22 assert not f.is_complete('(a + a')
23 assert f.is_complete('1')
24 assert not f.is_complete('1 + ')
25 assert not f.is_complete('1 + \n\n')
26 assert f.is_complete('if True:\n print 1\n')
27 assert not f.is_complete('if True:\n print 1')
28 assert f.is_complete('def f():\n print 1\n')
45 29
30 if __name__ == '__main__':
31 test_iscomplete()
46 32
47
48 class TestAsyncFrontendBase(unittest.TestCase):
49 def setUp(self):
50 """Setup the EngineService and FrontEndBase"""
51
52 self.fb = FrontEndCallbackChecker(engine=EngineService())
53
54
55 def test_implements_IFrontEnd(self):
56 assert(frontendbase.IFrontEnd.implementedBy(
57 AsyncFrontEndBase))
58
59
60 def test_is_complete_returns_False_for_incomplete_block(self):
61 """"""
62
63 block = """def test(a):"""
64
65 assert(self.fb.is_complete(block) == False)
66
67 def test_is_complete_returns_True_for_complete_block(self):
68 """"""
69
70 block = """def test(a): pass"""
71
72 assert(self.fb.is_complete(block))
73
74 block = """a=3"""
75
76 assert(self.fb.is_complete(block))
77
78
79 def test_blockID_added_to_result(self):
80 block = """3+3"""
81
82 d = self.fb.execute(block, blockID='TEST_ID')
83
84 d.addCallback(self.checkBlockID, expected='TEST_ID')
85
86 def test_blockID_added_to_failure(self):
87 block = "raise Exception()"
88
89 d = self.fb.execute(block,blockID='TEST_ID')
90 d.addErrback(self.checkFailureID, expected='TEST_ID')
91
92 def checkBlockID(self, result, expected=""):
93 assert(result['blockID'] == expected)
94
95
96 def checkFailureID(self, failure, expected=""):
97 assert(failure.blockID == expected)
98
99
100 def test_callbacks_added_to_execute(self):
101 """test that
102 update_cell_prompt
103 render_result
104
105 are added to execute request
106 """
107
108 d = self.fb.execute("10+10")
109 d.addCallback(self.checkCallbacks)
110
111
112 def checkCallbacks(self, result):
113 assert(self.fb.updateCalled)
114 assert(self.fb.renderResultCalled)
115
116
117 def test_error_callback_added_to_execute(self):
118 """test that render_error called on execution error"""
119
120 d = self.fb.execute("raise Exception()")
121 d.addCallback(self.checkRenderError)
122
123 def checkRenderError(self, result):
124 assert(self.fb.renderErrorCalled)
125
126 def test_history_returns_expected_block(self):
127 """Make sure history browsing doesn't fail"""
128
129 blocks = ["a=1","a=2","a=3"]
130 for b in blocks:
131 d = self.fb.execute(b)
132
133 # d is now the deferred for the last executed block
134 d.addCallback(self.historyTests, blocks)
135
136
137 def historyTests(self, result, blocks):
138 """historyTests"""
139
140 assert(len(blocks) >= 3)
141 assert(self.fb.get_history_previous("") == blocks[-2])
142 assert(self.fb.get_history_previous("") == blocks[-3])
143 assert(self.fb.get_history_next() == blocks[-2])
144
145
146 def test_history_returns_none_at_startup(self):
147 """test_history_returns_none_at_startup"""
148
149 assert(self.fb.get_history_previous("")==None)
150 assert(self.fb.get_history_next()==None)
151
152
@@ -1,63 +1,67
1 1 # encoding: utf-8
2 2 """
3 3 Test process execution and IO redirection.
4 4 """
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is
12 12 # in the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 from cStringIO import StringIO
16 16 from time import sleep
17 17 import sys
18 18
19 19 from IPython.frontend._process import PipedProcess
20 from IPython.testing import decorators as testdec
21
20 22
21 23 def test_capture_out():
22 24 """ A simple test to see if we can execute a process and get the output.
23 25 """
24 26 s = StringIO()
25 27 p = PipedProcess('echo 1', out_callback=s.write, )
26 28 p.start()
27 29 p.join()
28 assert s.getvalue() == '1\n'
30 result = s.getvalue().rstrip()
31 assert result == '1'
29 32
30 33
31 34 def test_io():
32 35 """ Checks that we can send characters on stdin to the process.
33 36 """
34 37 s = StringIO()
35 38 p = PipedProcess(sys.executable + ' -c "a = raw_input(); print a"',
36 39 out_callback=s.write, )
37 40 p.start()
38 41 test_string = '12345\n'
39 42 while not hasattr(p, 'process'):
40 43 sleep(0.1)
41 44 p.process.stdin.write(test_string)
42 45 p.join()
43 assert s.getvalue() == test_string
46 result = s.getvalue()
47 assert result == test_string
44 48
45 49
46 50 def test_kill():
47 51 """ Check that we can kill a process, and its subprocess.
48 52 """
49 53 s = StringIO()
50 54 p = PipedProcess(sys.executable + ' -c "a = raw_input();"',
51 55 out_callback=s.write, )
52 56 p.start()
53 57 while not hasattr(p, 'process'):
54 58 sleep(0.1)
55 59 p.process.kill()
56 60 assert p.process.poll() is not None
57 61
58 62
59 63 if __name__ == '__main__':
60 64 test_capture_out()
61 65 test_io()
62 66 test_kill()
63 67
@@ -1,428 +1,436
1 1 # encoding: utf-8
2 2 """
3 3 A Wx widget to act as a console and input commands.
4 4
5 5 This widget deals with prompts and provides an edit buffer
6 6 restricted to after the last prompt.
7 7 """
8 8
9 9 __docformat__ = "restructuredtext en"
10 10
11 11 #-------------------------------------------------------------------------------
12 12 # Copyright (C) 2008 The IPython Development Team
13 13 #
14 14 # Distributed under the terms of the BSD License. The full license is
15 15 # in the file COPYING, distributed as part of this software.
16 16 #-------------------------------------------------------------------------------
17 17
18 18 #-------------------------------------------------------------------------------
19 19 # Imports
20 20 #-------------------------------------------------------------------------------
21 21
22 22 import wx
23 23 import wx.stc as stc
24 24
25 25 from wx.py import editwindow
26 import time
26 27 import sys
27 28 LINESEP = '\n'
28 29 if sys.platform == 'win32':
29 30 LINESEP = '\n\r'
30 31
31 32 import re
32 33
33 34 # FIXME: Need to provide an API for non user-generated display on the
34 35 # screen: this should not be editable by the user.
35 36
36 37 _DEFAULT_SIZE = 10
37 38 if sys.platform == 'darwin':
38 _DEFAULT_STYLE = 12
39 _DEFAULT_SIZE = 12
39 40
40 41 _DEFAULT_STYLE = {
41 42 'stdout' : 'fore:#0000FF',
42 43 'stderr' : 'fore:#007f00',
43 44 'trace' : 'fore:#FF0000',
44 45
45 46 'default' : 'size:%d' % _DEFAULT_SIZE,
46 47 'bracegood' : 'fore:#00AA00,back:#000000,bold',
47 48 'bracebad' : 'fore:#FF0000,back:#000000,bold',
48 49
49 50 # properties for the various Python lexer styles
50 51 'comment' : 'fore:#007F00',
51 52 'number' : 'fore:#007F7F',
52 53 'string' : 'fore:#7F007F,italic',
53 54 'char' : 'fore:#7F007F,italic',
54 55 'keyword' : 'fore:#00007F,bold',
55 56 'triple' : 'fore:#7F0000',
56 57 'tripledouble' : 'fore:#7F0000',
57 58 'class' : 'fore:#0000FF,bold,underline',
58 59 'def' : 'fore:#007F7F,bold',
59 60 'operator' : 'bold'
60 61 }
61 62
62 63 # new style numbers
63 64 _STDOUT_STYLE = 15
64 65 _STDERR_STYLE = 16
65 66 _TRACE_STYLE = 17
66 67
67 68
68 69 # system colors
69 70 #SYS_COLOUR_BACKGROUND = wx.SystemSettings.GetColour(wx.SYS_COLOUR_BACKGROUND)
70 71
71 72 #-------------------------------------------------------------------------------
72 73 # The console widget class
73 74 #-------------------------------------------------------------------------------
74 75 class ConsoleWidget(editwindow.EditWindow):
75 76 """ Specialized styled text control view for console-like workflow.
76 77
77 78 This widget is mainly interested in dealing with the prompt and
78 79 keeping the cursor inside the editing line.
79 80 """
80 81
81 82 # This is where the title captured from the ANSI escape sequences is
82 83 # stored.
83 84 title = 'Console'
84 85
85 86 # The buffer being edited.
86 87 def _set_input_buffer(self, string):
87 88 self.SetSelection(self.current_prompt_pos, self.GetLength())
88 89 self.ReplaceSelection(string)
89 90 self.GotoPos(self.GetLength())
90 91
91 92 def _get_input_buffer(self):
92 93 """ Returns the text in current edit buffer.
93 94 """
94 95 input_buffer = self.GetTextRange(self.current_prompt_pos,
95 96 self.GetLength())
96 97 input_buffer = input_buffer.replace(LINESEP, '\n')
97 98 return input_buffer
98 99
99 100 input_buffer = property(_get_input_buffer, _set_input_buffer)
100 101
101 102 style = _DEFAULT_STYLE.copy()
102 103
103 104 # Translation table from ANSI escape sequences to color. Override
104 105 # this to specify your colors.
105 106 ANSI_STYLES = {'0;30': [0, 'BLACK'], '0;31': [1, 'RED'],
106 107 '0;32': [2, 'GREEN'], '0;33': [3, 'BROWN'],
107 108 '0;34': [4, 'BLUE'], '0;35': [5, 'PURPLE'],
108 109 '0;36': [6, 'CYAN'], '0;37': [7, 'LIGHT GREY'],
109 110 '1;30': [8, 'DARK GREY'], '1;31': [9, 'RED'],
110 111 '1;32': [10, 'SEA GREEN'], '1;33': [11, 'YELLOW'],
111 112 '1;34': [12, 'LIGHT BLUE'], '1;35':
112 113 [13, 'MEDIUM VIOLET RED'],
113 114 '1;36': [14, 'LIGHT STEEL BLUE'], '1;37': [15, 'YELLOW']}
114 115
115 116 # The color of the caret (call _apply_style() after setting)
116 117 carret_color = 'BLACK'
117 118
119 # Store the last time a refresh was done
120 _last_refresh_time = 0
121
118 122 #--------------------------------------------------------------------------
119 123 # Public API
120 124 #--------------------------------------------------------------------------
121 125
122 126 def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
123 size=wx.DefaultSize, style=0, ):
127 size=wx.DefaultSize, style=wx.WANTS_CHARS, ):
124 128 editwindow.EditWindow.__init__(self, parent, id, pos, size, style)
125 129 self._configure_scintilla()
126 130
127 131 self.Bind(wx.EVT_KEY_DOWN, self._on_key_down)
128 132 self.Bind(wx.EVT_KEY_UP, self._on_key_up)
129 133
130 134
131 135 def write(self, text, refresh=True):
132 136 """ Write given text to buffer, while translating the ansi escape
133 137 sequences.
134 138 """
135 139 # XXX: do not put print statements to sys.stdout/sys.stderr in
136 140 # this method: the print statements would call this method and
137 141 # you would end up with an infinite loop
138 142 title = self.title_pat.split(text)
139 143 if len(title)>1:
140 144 self.title = title[-2]
141 145
142 146 text = self.title_pat.sub('', text)
143 147 segments = self.color_pat.split(text)
144 148 segment = segments.pop(0)
145 149 self.GotoPos(self.GetLength())
146 150 self.StartStyling(self.GetLength(), 0xFF)
147 151 try:
148 152 self.AppendText(segment)
149 153 except UnicodeDecodeError:
150 154 # XXX: Do I really want to skip the exception?
151 155 pass
152 156
153 157 if segments:
154 158 for ansi_tag, text in zip(segments[::2], segments[1::2]):
155 159 self.StartStyling(self.GetLength(), 0xFF)
156 160 try:
157 161 self.AppendText(text)
158 162 except UnicodeDecodeError:
159 163 # XXX: Do I really want to skip the exception?
160 164 pass
161 165
162 166 if ansi_tag not in self.ANSI_STYLES:
163 167 style = 0
164 168 else:
165 169 style = self.ANSI_STYLES[ansi_tag][0]
166 170
167 171 self.SetStyling(len(text), style)
168 172
169 173 self.GotoPos(self.GetLength())
170 174 if refresh:
171 # Maybe this is faster than wx.Yield()
172 self.ProcessEvent(wx.PaintEvent())
173 #wx.Yield()
175 current_time = time.time()
176 if current_time - self._last_refresh_time > 0.03:
177 if sys.platform == 'win32':
178 wx.SafeYield()
179 else:
180 wx.Yield()
181 # self.ProcessEvent(wx.PaintEvent())
182 self._last_refresh_time = current_time
174 183
175 184
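
As a side note, the split performed by color_pat above is easy to check outside of wx. The snippet below is a standalone sketch using the same regular expressions defined in _configure_scintilla() further down; the sample string is arbitrary.

    import re

    color_pat = re.compile('\x01?\x1b\[(.*?)m\x02?')   # same pattern as below
    title_pat = re.compile('\x1b]0;(.*?)\x07')

    text = '\x1b[0;31mOut[1]: \x1b[0m42\n'
    segments = color_pat.split(text)
    leading = segments.pop(0)                          # plain text before the first escape
    pairs = zip(segments[::2], segments[1::2])         # what write() iterates over
    print(list(pairs))    # [('0;31', 'Out[1]: '), ('0', '42\n')]
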
176 185 def new_prompt(self, prompt):
177 186 """ Prints a prompt at start of line, and move the start of the
178 187 current block there.
179 188
180 189 The prompt can be given with ascii escape sequences.
181 190 """
182 191 self.write(prompt, refresh=False)
183 192 # now we update our cursor giving end of prompt
184 193 self.current_prompt_pos = self.GetLength()
185 194 self.current_prompt_line = self.GetCurrentLine()
186 wx.Yield()
187 195 self.EnsureCaretVisible()
188 196
189 197
190 198 def scroll_to_bottom(self):
191 199 maxrange = self.GetScrollRange(wx.VERTICAL)
192 200 self.ScrollLines(maxrange)
193 201
194 202
195 203 def pop_completion(self, possibilities, offset=0):
196 204 """ Pops up an autocompletion menu. Offset is the offset
197 205 in characters of the position at which the menu should
198 206 appear, relative to the cursor.
199 207 """
200 208 self.AutoCompSetIgnoreCase(False)
201 209 self.AutoCompSetAutoHide(False)
202 210 self.AutoCompSetMaxHeight(len(possibilities))
203 211 self.AutoCompShow(offset, " ".join(possibilities))
204 212
205 213
206 214 def get_line_width(self):
207 215 """ Return the width of the line in characters.
208 216 """
209 217 return self.GetSize()[0]/self.GetCharWidth()
210 218
211 219 #--------------------------------------------------------------------------
212 220 # EditWindow API
213 221 #--------------------------------------------------------------------------
214 222
215 223 def OnUpdateUI(self, event):
216 224 """ Override the OnUpdateUI of the EditWindow class, to prevent
217 225 syntax highlighting, both for faster redraw and for a more
218 226 consistent look and feel.
219 227 """
220 228
221 229 #--------------------------------------------------------------------------
222 230 # Private API
223 231 #--------------------------------------------------------------------------
224 232
225 233 def _apply_style(self):
226 234 """ Applies the colors for the different text elements and the
227 235 caret.
228 236 """
229 237 self.SetCaretForeground(self.carret_color)
230 238
231 239 #self.StyleClearAll()
232 240 self.StyleSetSpec(stc.STC_STYLE_BRACELIGHT,
233 241 "fore:#FF0000,back:#0000FF,bold")
234 242 self.StyleSetSpec(stc.STC_STYLE_BRACEBAD,
235 243 "fore:#000000,back:#FF0000,bold")
236 244
237 245 for style in self.ANSI_STYLES.values():
238 246 self.StyleSetSpec(style[0], "bold,fore:%s" % style[1])
239 247
240 248
241 249 def _configure_scintilla(self):
242 250 self.SetEOLMode(stc.STC_EOL_LF)
243 251
244 252 # Ctrl-"+" or Ctrl-"-" can be used to zoom in/zoom out the text inside
245 253 # the widget
246 254 self.CmdKeyAssign(ord('+'), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMIN)
247 255 self.CmdKeyAssign(ord('-'), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMOUT)
248 256 # Also allow Ctrl Shift "=" for poor non US keyboard users.
249 257 self.CmdKeyAssign(ord('='), stc.STC_SCMOD_CTRL|stc.STC_SCMOD_SHIFT,
250 258 stc.STC_CMD_ZOOMIN)
251 259
252 260 # Keys: we need to clear some of the keys that don't play
253 261 # well with a console.
254 262 self.CmdKeyClear(ord('D'), stc.STC_SCMOD_CTRL)
255 263 self.CmdKeyClear(ord('L'), stc.STC_SCMOD_CTRL)
256 264 self.CmdKeyClear(ord('T'), stc.STC_SCMOD_CTRL)
257 265 self.CmdKeyClear(ord('A'), stc.STC_SCMOD_CTRL)
258 266
259 267 self.SetEOLMode(stc.STC_EOL_CRLF)
260 268 self.SetWrapMode(stc.STC_WRAP_CHAR)
261 269 self.SetWrapMode(stc.STC_WRAP_WORD)
262 270 self.SetBufferedDraw(True)
263 271 self.SetUseAntiAliasing(True)
264 272 self.SetLayoutCache(stc.STC_CACHE_PAGE)
265 273 self.SetUndoCollection(False)
266 274 self.SetUseTabs(True)
267 275 self.SetIndent(4)
268 276 self.SetTabWidth(4)
269 277
270 278 # we don't want scintilla's autocompletion to choose
271 279 # automatically out of a single-choice list, as we pop it up
272 280 # automatically
273 281 self.AutoCompSetChooseSingle(False)
274 282 self.AutoCompSetMaxHeight(10)
275 283 # XXX: this doesn't seem to have an effect.
276 284 self.AutoCompSetFillUps('\n')
277 285
278 286 self.SetMargins(3, 3) # text is moved 3px away from the border
279 287 # Suppressing Scintilla margins
280 288 self.SetMarginWidth(0, 0)
281 289 self.SetMarginWidth(1, 0)
282 290 self.SetMarginWidth(2, 0)
283 291
284 292 self._apply_style()
285 293
286 294 # Xterm escape sequences
287 295 self.color_pat = re.compile('\x01?\x1b\[(.*?)m\x02?')
288 296 self.title_pat = re.compile('\x1b]0;(.*?)\x07')
289 297
290 298 #self.SetEdgeMode(stc.STC_EDGE_LINE)
291 299 #self.SetEdgeColumn(80)
292 300
293 301 # styles
294 302 p = self.style
295 303 self.StyleSetSpec(stc.STC_STYLE_DEFAULT, p['default'])
296 304 self.StyleClearAll()
297 305 self.StyleSetSpec(_STDOUT_STYLE, p['stdout'])
298 306 self.StyleSetSpec(_STDERR_STYLE, p['stderr'])
299 307 self.StyleSetSpec(_TRACE_STYLE, p['trace'])
300 308
301 309 self.StyleSetSpec(stc.STC_STYLE_BRACELIGHT, p['bracegood'])
302 310 self.StyleSetSpec(stc.STC_STYLE_BRACEBAD, p['bracebad'])
303 311 self.StyleSetSpec(stc.STC_P_COMMENTLINE, p['comment'])
304 312 self.StyleSetSpec(stc.STC_P_NUMBER, p['number'])
305 313 self.StyleSetSpec(stc.STC_P_STRING, p['string'])
306 314 self.StyleSetSpec(stc.STC_P_CHARACTER, p['char'])
307 315 self.StyleSetSpec(stc.STC_P_WORD, p['keyword'])
308 316 self.StyleSetSpec(stc.STC_P_WORD2, p['keyword'])
309 317 self.StyleSetSpec(stc.STC_P_TRIPLE, p['triple'])
310 318 self.StyleSetSpec(stc.STC_P_TRIPLEDOUBLE, p['tripledouble'])
311 319 self.StyleSetSpec(stc.STC_P_CLASSNAME, p['class'])
312 320 self.StyleSetSpec(stc.STC_P_DEFNAME, p['def'])
313 321 self.StyleSetSpec(stc.STC_P_OPERATOR, p['operator'])
314 322 self.StyleSetSpec(stc.STC_P_COMMENTBLOCK, p['comment'])
315 323
316 324 def _on_key_down(self, event, skip=True):
317 325 """ Key press callback used for correcting behavior for
318 326 console-like interfaces: the cursor is constrained to be after
319 327 the last prompt.
320 328 
321 329 Return True if the event has been caught.
322 330 """
323 331 catched = True
324 332 # Intercept some specific keys.
325 333 if event.KeyCode == ord('L') and event.ControlDown() :
326 334 self.scroll_to_bottom()
327 335 elif event.KeyCode == ord('K') and event.ControlDown() :
328 336 self.input_buffer = ''
329 337 elif event.KeyCode == ord('A') and event.ControlDown() :
330 338 self.GotoPos(self.GetLength())
331 339 self.SetSelectionStart(self.current_prompt_pos)
332 340 self.SetSelectionEnd(self.GetCurrentPos())
333 341 catched = True
334 342 elif event.KeyCode == ord('E') and event.ControlDown() :
335 343 self.GotoPos(self.GetLength())
336 344 catched = True
337 345 elif event.KeyCode == wx.WXK_PAGEUP:
338 346 self.ScrollPages(-1)
339 347 elif event.KeyCode == wx.WXK_PAGEDOWN:
340 348 self.ScrollPages(1)
341 349 elif event.KeyCode == wx.WXK_UP and event.ShiftDown():
342 350 self.ScrollLines(-1)
343 351 elif event.KeyCode == wx.WXK_DOWN and event.ShiftDown():
344 352 self.ScrollLines(1)
345 353 else:
346 354 catched = False
347 355
348 356 if self.AutoCompActive():
349 357 event.Skip()
350 358 else:
351 359 if event.KeyCode in (13, wx.WXK_NUMPAD_ENTER) and \
352 360 event.Modifiers in (wx.MOD_NONE, wx.MOD_WIN):
353 361 catched = True
354 362 self.CallTipCancel()
355 363 self.write('\n', refresh=False)
356 364 # Under windows scintilla seems to be doing funny stuff to the
357 365 # line returns here, but the getter for input_buffer filters
358 366 # this out.
359 367 if sys.platform == 'win32':
360 368 self.input_buffer = self.input_buffer
361 369 self._on_enter()
362 370
363 371 elif event.KeyCode == wx.WXK_HOME:
364 372 if event.Modifiers in (wx.MOD_NONE, wx.MOD_WIN):
365 373 self.GotoPos(self.current_prompt_pos)
366 374 catched = True
367 375
368 376 elif event.Modifiers == wx.MOD_SHIFT:
369 377 # FIXME: This behavior is not ideal: if the selection
370 378 # is already started, it will jump.
371 379 self.SetSelectionStart(self.current_prompt_pos)
372 380 self.SetSelectionEnd(self.GetCurrentPos())
373 381 catched = True
374 382
375 383 elif event.KeyCode == wx.WXK_UP:
376 384 if self.GetCurrentLine() > self.current_prompt_line:
377 385 if self.GetCurrentLine() == self.current_prompt_line + 1 \
378 386 and self.GetColumn(self.GetCurrentPos()) < \
379 387 self.GetColumn(self.current_prompt_pos):
380 388 self.GotoPos(self.current_prompt_pos)
381 389 else:
382 390 event.Skip()
383 391 catched = True
384 392
385 393 elif event.KeyCode in (wx.WXK_LEFT, wx.WXK_BACK):
386 394 if self.GetCurrentPos() > self.current_prompt_pos:
387 395 event.Skip()
388 396 catched = True
389 397
390 398 if skip and not catched:
391 399 # Put the cursor back in the edit region
392 400 if self.GetCurrentPos() < self.current_prompt_pos:
393 401 self.GotoPos(self.current_prompt_pos)
394 402 else:
395 403 event.Skip()
396 404
397 405 return catched
398 406
399 407
400 408 def _on_key_up(self, event, skip=True):
401 409 """ If cursor is outside the editing region, put it back.
402 410 """
403 411 event.Skip()
404 412 if self.GetCurrentPos() < self.current_prompt_pos:
405 413 self.GotoPos(self.current_prompt_pos)
406 414
407 415
408 416
409 417 if __name__ == '__main__':
410 418 # Some simple code to test the console widget.
411 419 class MainWindow(wx.Frame):
412 420 def __init__(self, parent, id, title):
413 421 wx.Frame.__init__(self, parent, id, title, size=(300,250))
414 422 self._sizer = wx.BoxSizer(wx.VERTICAL)
415 423 self.console_widget = ConsoleWidget(self)
416 424 self._sizer.Add(self.console_widget, 1, wx.EXPAND)
417 425 self.SetSizer(self._sizer)
418 426 self.SetAutoLayout(1)
419 427 self.Show(True)
420 428
421 429 app = wx.PySimpleApp()
422 430 w = MainWindow(None, wx.ID_ANY, 'ConsoleWidget')
423 431 w.SetSize((780, 460))
424 432 w.Show()
425 433
426 434 app.MainLoop()
427 435
428 436
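
The refresh change in write() above (yield to the event loop at most once every 30 ms) is an instance of a generic throttling pattern. A minimal, GUI-free sketch of that idea follows; the Throttled class and its names are illustrative, not part of IPython.

    import time

    class Throttled(object):
        """Run an expensive callable at most once per `interval` seconds."""
        def __init__(self, refresh, interval=0.03):
            self.refresh = refresh        # e.g. wx.SafeYield on win32, wx.Yield elsewhere
            self.interval = interval
            self._last = 0

        def __call__(self):
            now = time.time()
            if now - self._last > self.interval:
                self.refresh()
                self._last = now

    refresh = Throttled(lambda: None)     # stand-in for the real GUI refresh
    for _ in range(1000):
        refresh()                         # cheap to call in a tight loop
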
@@ -1,510 +1,526
1 1 # encoding: utf-8 -*- test-case-name:
2 2 # FIXME: Need to add tests.
3 3 # ipython1.frontend.wx.tests.test_wx_frontend -*-
4 4
5 5 """Classes to provide a Wx frontend to the
6 6 IPython.kernel.core.interpreter.
7 7
8 8 The WxController class inherits from ConsoleWidget, which provides a
9 9 console-like text-rendering widget suitable for a terminal.
10 10 """
11 11
12 12 __docformat__ = "restructuredtext en"
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Copyright (C) 2008 The IPython Development Team
16 16 #
17 17 # Distributed under the terms of the BSD License. The full license is in
18 18 # the file COPYING, distributed as part of this software.
19 19 #-------------------------------------------------------------------------------
20 20
21 21 #-------------------------------------------------------------------------------
22 22 # Imports
23 23 #-------------------------------------------------------------------------------
24 24
25 25 # Major library imports
26 26 import re
27 27 import __builtin__
28 28 from time import sleep
29 29 import sys
30 30 from threading import Lock
31 31 import string
32 32
33 33 import wx
34 34 from wx import stc
35 35
36 36 # Ipython-specific imports.
37 37 from IPython.frontend._process import PipedProcess
38 38 from console_widget import ConsoleWidget
39 39 from IPython.frontend.prefilterfrontend import PrefilterFrontEnd
40 40
41 41 #-------------------------------------------------------------------------------
42 42 # Constants
43 43 #-------------------------------------------------------------------------------
44 44
45 45 _COMPLETE_BUFFER_BG = '#FAFAF1' # Nice green
46 46 _INPUT_BUFFER_BG = '#FDFFD3' # Nice yellow
47 47 _ERROR_BG = '#FFF1F1' # Nice red
48 48
49 49 _COMPLETE_BUFFER_MARKER = 31
50 50 _ERROR_MARKER = 30
51 51 _INPUT_MARKER = 29
52 52
53 53 prompt_in1 = \
54 54 '\n\x01\x1b[0;34m\x02In [\x01\x1b[1;34m\x02$number\x01\x1b[0;34m\x02]: \x01\x1b[0m\x02'
55 55
56 56 prompt_out = \
57 57 '\x01\x1b[0;31m\x02Out[\x01\x1b[1;31m\x02$number\x01\x1b[0;31m\x02]: \x01\x1b[0m\x02'
58 58
59 59 #-------------------------------------------------------------------------------
60 60 # Classes to implement the Wx frontend
61 61 #-------------------------------------------------------------------------------
62 62 class WxController(ConsoleWidget, PrefilterFrontEnd):
63 63 """Classes to provide a Wx frontend to the
64 64 IPython.kernel.core.interpreter.
65 65
66 66 This class inherits from ConsoleWidget, that provides a console-like
67 67 widget to provide a text-rendering widget suitable for a terminal.
68 68 """
69 69
70 70 output_prompt_template = string.Template(prompt_out)
71 71
72 72 input_prompt_template = string.Template(prompt_in1)
73 73
74 74 # Print debug info on what is happening to the console.
75 75 debug = False
76 76
77 77 # The title of the terminal, as captured through the ANSI escape
78 78 # sequences.
79 79 def _set_title(self, title):
80 80 return self.Parent.SetTitle(title)
81 81
82 82 def _get_title(self):
83 83 return self.Parent.GetTitle()
84 84
85 85 title = property(_get_title, _set_title)
86 86
87 87
88 88 # The buffer being edited.
89 89 # We are duplicating the definition here because of multiple
90 90 # inheritance
91 91 def _set_input_buffer(self, string):
92 92 ConsoleWidget._set_input_buffer(self, string)
93 93 self._colorize_input_buffer()
94 94
95 95 def _get_input_buffer(self):
96 96 """ Returns the text in current edit buffer.
97 97 """
98 98 return ConsoleWidget._get_input_buffer(self)
99 99
100 100 input_buffer = property(_get_input_buffer, _set_input_buffer)
101 101
102 102
103 103 #--------------------------------------------------------------------------
104 104 # Private Attributes
105 105 #--------------------------------------------------------------------------
106 106
107 107 # A flag governing the behavior of the input. Can be:
108 108 #
109 109 # 'readline' for readline-like behavior with a prompt
110 110 # and an edit buffer.
111 111 # 'raw_input' similar to readline, but triggered by a raw-input
112 112 # call. Can be used by subclasses to act differently.
113 113 # 'subprocess' for sending the raw input directly to a
114 114 # subprocess.
115 115 # 'buffering' for buffering of the input, that will be used
116 116 # when the input state switches back to another state.
117 117 _input_state = 'readline'
118 118
119 119 # Attribute to store reference to the pipes of a subprocess, if we
120 120 # are running any.
121 121 _running_process = False
122 122
123 123 # A queue for writing fast streams to the screen without flooding the
124 124 # event loop
125 125 _out_buffer = []
126 126
127 127 # A lock to lock the _out_buffer to make sure we don't empty it
128 128 # while it is being swapped
129 129 _out_buffer_lock = Lock()
130 130
131 # The different line markers used to highlight the prompts.
131 132 _markers = dict()
132 133
133 134 #--------------------------------------------------------------------------
134 135 # Public API
135 136 #--------------------------------------------------------------------------
136 137
137 138 def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
138 size=wx.DefaultSize, style=wx.CLIP_CHILDREN,
139 size=wx.DefaultSize,
140 style=wx.CLIP_CHILDREN|wx.WANTS_CHARS,
139 141 *args, **kwds):
140 142 """ Create Shell instance.
141 143 """
142 144 ConsoleWidget.__init__(self, parent, id, pos, size, style)
143 145 PrefilterFrontEnd.__init__(self, **kwds)
146
147 # Stick in our own raw_input:
148 self.ipython0.raw_input = self.raw_input
144 149
145 150 # Marker for complete buffer.
146 151 self.MarkerDefine(_COMPLETE_BUFFER_MARKER, stc.STC_MARK_BACKGROUND,
147 152 background=_COMPLETE_BUFFER_BG)
148 153 # Marker for current input buffer.
149 154 self.MarkerDefine(_INPUT_MARKER, stc.STC_MARK_BACKGROUND,
150 155 background=_INPUT_BUFFER_BG)
151 156 # Marker for tracebacks.
152 157 self.MarkerDefine(_ERROR_MARKER, stc.STC_MARK_BACKGROUND,
153 158 background=_ERROR_BG)
154 159
155 160 # A time for flushing the write buffer
156 161 BUFFER_FLUSH_TIMER_ID = 100
157 162 self._buffer_flush_timer = wx.Timer(self, BUFFER_FLUSH_TIMER_ID)
158 163 wx.EVT_TIMER(self, BUFFER_FLUSH_TIMER_ID, self._buffer_flush)
159 164
160 165 if 'debug' in kwds:
161 166 self.debug = kwds['debug']
162 167 kwds.pop('debug')
163 168
164 169 # Inject self in namespace, for debug
165 170 if self.debug:
166 171 self.shell.user_ns['self'] = self
172 # Inject our own raw_input in namespace
173 self.shell.user_ns['raw_input'] = self.raw_input
167 174
168 175
169 def raw_input(self, prompt):
176 def raw_input(self, prompt=''):
170 177 """ A replacement from python's raw_input.
171 178 """
172 179 self.new_prompt(prompt)
173 180 self._input_state = 'raw_input'
174 181 if hasattr(self, '_cursor'):
175 182 del self._cursor
176 183 self.SetCursor(wx.StockCursor(wx.CURSOR_CROSS))
177 self.waiting = True
178 184 self.__old_on_enter = self._on_enter
185 event_loop = wx.EventLoop()
179 186 def my_on_enter():
180 self.waiting = False
187 event_loop.Exit()
181 188 self._on_enter = my_on_enter
182 # XXX: Busy waiting, ugly.
183 while self.waiting:
184 wx.Yield()
185 sleep(0.1)
189 # XXX: Running a separate event_loop. Ugly.
190 event_loop.Run()
186 191 self._on_enter = self.__old_on_enter
187 192 self._input_state = 'buffering'
188 193 self._cursor = wx.BusyCursor()
189 194 return self.input_buffer.rstrip('\n')
190 195
191 196
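
The busy-wait removed above is replaced by a nested wx.EventLoop, which blocks the caller while keeping the GUI responsive. The helper below is a hedged restatement of that pattern as a standalone function; it assumes a widget exposing the _on_enter and input_buffer attributes used in this file.

    import wx

    def blocking_read(widget):
        """Block until the user presses Enter, without freezing the GUI."""
        event_loop = wx.EventLoop()
        old_on_enter = widget._on_enter
        # Pressing Enter now exits the nested loop instead of executing the input.
        widget._on_enter = event_loop.Exit
        try:
            event_loop.Run()              # blocks here; events keep being processed
        finally:
            widget._on_enter = old_on_enter
        return widget.input_buffer.rstrip('\n')
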
192 197 def system_call(self, command_string):
193 198 self._input_state = 'subprocess'
199 event_loop = wx.EventLoop()
200 def _end_system_call():
201 self._input_state = 'buffering'
202 self._running_process = False
203 event_loop.Exit()
204
194 205 self._running_process = PipedProcess(command_string,
195 206 out_callback=self.buffered_write,
196 end_callback = self._end_system_call)
207 end_callback = _end_system_call)
197 208 self._running_process.start()
198 # XXX: another one of these polling loops to have a blocking
199 # call
200 wx.Yield()
201 while self._running_process:
202 wx.Yield()
203 sleep(0.1)
209 # XXX: Running a separate event_loop. Ugly.
210 event_loop.Run()
204 211 # Be sure to flush the buffer.
205 212 self._buffer_flush(event=None)
206 213
207 214
208 215 def do_calltip(self):
209 216 """ Analyse current and displays useful calltip for it.
210 217 """
211 218 if self.debug:
212 219 print >>sys.__stdout__, "do_calltip"
213 220 separators = re.compile('[\s\{\}\[\]\(\)\= ,:]')
214 221 symbol = self.input_buffer
215 222 symbol_string = separators.split(symbol)[-1]
216 223 base_symbol_string = symbol_string.split('.')[0]
217 224 if base_symbol_string in self.shell.user_ns:
218 225 symbol = self.shell.user_ns[base_symbol_string]
219 226 elif base_symbol_string in self.shell.user_global_ns:
220 227 symbol = self.shell.user_global_ns[base_symbol_string]
221 228 elif base_symbol_string in __builtin__.__dict__:
222 229 symbol = __builtin__.__dict__[base_symbol_string]
223 230 else:
224 231 return False
225 232 try:
226 233 for name in symbol_string.split('.')[1:] + ['__doc__']:
227 234 symbol = getattr(symbol, name)
228 235 self.AutoCompCancel()
229 wx.Yield()
230 self.CallTipShow(self.GetCurrentPos(), symbol)
236 # Check that the symbol can indeed be converted to a string:
237 symbol += ''
238 wx.CallAfter(self.CallTipShow, self.GetCurrentPos(), symbol)
231 239 except:
232 240 # The retrieve symbol couldn't be converted to a string
233 241 pass
234 242
235 243
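
The symbol lookup in do_calltip() is just a regex split followed by attribute traversal. A self-contained illustration of that step (the input line and the one-entry namespace dict are made up for the example):

    import os
    import re

    separators = re.compile('[\s\{\}\[\]\(\)\= ,:]')

    line = 'result = os.path.join'
    symbol_string = separators.split(line)[-1]     # 'os.path.join'
    base = symbol_string.split('.')[0]             # 'os'

    symbol = {'os': os}[base]                      # stand-in for the user_ns lookup
    for name in symbol_string.split('.')[1:] + ['__doc__']:
        symbol = getattr(symbol, name)
    print(symbol)                                  # the docstring shown in the calltip
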
236 244 def _popup_completion(self, create=False):
237 245 """ Updates the popup completion menu if it exists. If create is
238 246 true, open the menu.
239 247 """
240 248 if self.debug:
241 print >>sys.__stdout__, "_popup_completion",
249 print >>sys.__stdout__, "_popup_completion"
242 250 line = self.input_buffer
243 if (self.AutoCompActive() and not line[-1] == '.') \
251 if (self.AutoCompActive() and line and not line[-1] == '.') \
244 252 or create==True:
245 253 suggestion, completions = self.complete(line)
246 254 offset=0
247 255 if completions:
248 256 complete_sep = re.compile('[\s\{\}\[\]\(\)\= ,:]')
249 257 residual = complete_sep.split(line)[-1]
250 258 offset = len(residual)
251 259 self.pop_completion(completions, offset=offset)
252 260 if self.debug:
253 261 print >>sys.__stdout__, completions
254 262
255 263
256 264 def buffered_write(self, text):
257 265 """ A write method for streams, that caches the stream in order
258 266 to avoid flooding the event loop.
259 267
260 268 This can be called outside of the main loop, in separate
261 269 threads.
262 270 """
263 271 self._out_buffer_lock.acquire()
264 272 self._out_buffer.append(text)
265 273 self._out_buffer_lock.release()
266 274 if not self._buffer_flush_timer.IsRunning():
267 275 wx.CallAfter(self._buffer_flush_timer.Start,
268 276 milliseconds=100, oneShot=True)
269 277
270 278
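
buffered_write() plus _buffer_flush() form a small producer/consumer pair: worker threads append to a locked list and the wx timer drains it on the GUI thread. The same idea, stripped of wx, as a hedged sketch (class and method names are illustrative):

    from threading import Lock

    class OutputBuffer(object):
        """Accumulate text from any thread; flush it in one go from the main loop."""
        def __init__(self, write):
            self._write = write        # the real (GUI) write, main thread only
            self._buffer = []
            self._lock = Lock()

        def buffered_write(self, text):
            # Safe to call from worker threads.
            self._lock.acquire()
            self._buffer.append(text)
            self._lock.release()

        def flush(self):
            # Call periodically from the main loop (here: the wx.Timer callback).
            self._lock.acquire()
            chunk, self._buffer = self._buffer, []
            self._lock.release()
            self._write(''.join(chunk))

    out = OutputBuffer(write=lambda s: None)
    out.buffered_write('hello ')
    out.buffered_write('world\n')
    out.flush()
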
271 279 #--------------------------------------------------------------------------
272 280 # LineFrontEnd interface
273 281 #--------------------------------------------------------------------------
274 282
275 283 def execute(self, python_string, raw_string=None):
276 284 self._input_state = 'buffering'
277 285 self.CallTipCancel()
278 286 self._cursor = wx.BusyCursor()
279 287 if raw_string is None:
280 288 raw_string = python_string
281 289 end_line = self.current_prompt_line \
282 290 + max(1, len(raw_string.split('\n'))-1)
283 291 for i in range(self.current_prompt_line, end_line):
284 292 if i in self._markers:
285 293 self.MarkerDeleteHandle(self._markers[i])
286 294 self._markers[i] = self.MarkerAdd(i, _COMPLETE_BUFFER_MARKER)
287 # Update the display:
288 wx.Yield()
289 self.GotoPos(self.GetLength())
290 PrefilterFrontEnd.execute(self, python_string, raw_string=raw_string)
295 # Use a CallAfter to update the display robustly under Windows
296 def callback():
297 self.GotoPos(self.GetLength())
298 PrefilterFrontEnd.execute(self, python_string,
299 raw_string=raw_string)
300 wx.CallAfter(callback)
291 301
292 302 def save_output_hooks(self):
293 303 self.__old_raw_input = __builtin__.raw_input
294 304 PrefilterFrontEnd.save_output_hooks(self)
295 305
296 306 def capture_output(self):
297 __builtin__.raw_input = self.raw_input
298 307 self.SetLexer(stc.STC_LEX_NULL)
299 308 PrefilterFrontEnd.capture_output(self)
309 __builtin__.raw_input = self.raw_input
300 310
301 311
302 312 def release_output(self):
303 313 __builtin__.raw_input = self.__old_raw_input
304 314 PrefilterFrontEnd.release_output(self)
305 315 self.SetLexer(stc.STC_LEX_PYTHON)
306 316
307 317
308 318 def after_execute(self):
309 319 PrefilterFrontEnd.after_execute(self)
310 320 # Clear the wait cursor
311 321 if hasattr(self, '_cursor'):
312 322 del self._cursor
313 323 self.SetCursor(wx.StockCursor(wx.CURSOR_CHAR))
314 324
315 325
316 326 def show_traceback(self):
317 327 start_line = self.GetCurrentLine()
318 328 PrefilterFrontEnd.show_traceback(self)
319 wx.Yield()
329 self.ProcessEvent(wx.PaintEvent())
330 #wx.Yield()
320 331 for i in range(start_line, self.GetCurrentLine()):
321 332 self._markers[i] = self.MarkerAdd(i, _ERROR_MARKER)
322 333
323 334
324 335 #--------------------------------------------------------------------------
336 # FrontEndBase interface
337 #--------------------------------------------------------------------------
338
339 def render_error(self, e):
340 start_line = self.GetCurrentLine()
341 self.write('\n' + e + '\n')
342 for i in range(start_line, self.GetCurrentLine()):
343 self._markers[i] = self.MarkerAdd(i, _ERROR_MARKER)
344
345
346 #--------------------------------------------------------------------------
325 347 # ConsoleWidget interface
326 348 #--------------------------------------------------------------------------
327 349
328 350 def new_prompt(self, prompt):
329 351 """ Display a new prompt, and start a new input buffer.
330 352 """
331 353 self._input_state = 'readline'
332 354 ConsoleWidget.new_prompt(self, prompt)
333 355 i = self.current_prompt_line
334 356 self._markers[i] = self.MarkerAdd(i, _INPUT_MARKER)
335 357
336 358
337 359 def write(self, *args, **kwargs):
338 360 # Avoid multiple inheritence, be explicit about which
339 361 # parent method class gets called
340 362 ConsoleWidget.write(self, *args, **kwargs)
341 363
342 364
343 365 def _on_key_down(self, event, skip=True):
344 366 """ Capture the character events, let the parent
345 367 widget handle them, and put our logic afterward.
346 368 """
347 369 # FIXME: This method needs to be broken down into smaller ones.
348 370 current_line_number = self.GetCurrentLine()
349 371 if event.KeyCode in (ord('c'), ord('C')) and event.ControlDown():
350 372 # Capture Control-C
351 373 if self._input_state == 'subprocess':
352 374 if self.debug:
353 375 print >>sys.__stderr__, 'Killing running process'
354 self._running_process.process.kill()
376 if hasattr(self._running_process, 'process'):
377 self._running_process.process.kill()
355 378 elif self._input_state == 'buffering':
356 379 if self.debug:
357 380 print >>sys.__stderr__, 'Raising KeyboardInterrupt'
358 381 raise KeyboardInterrupt
359 382 # XXX: We need to make really sure we
360 383 # get back to a prompt.
361 384 elif self._input_state == 'subprocess' and (
362 385 ( event.KeyCode<256 and
363 386 not event.ControlDown() )
364 387 or
365 388 ( event.KeyCode in (ord('d'), ord('D')) and
366 389 event.ControlDown())):
367 390 # We are running a process, we redirect keys.
368 391 ConsoleWidget._on_key_down(self, event, skip=skip)
369 392 char = chr(event.KeyCode)
370 393 # Deal with some inconsistency in wx keycodes:
371 394 if char == '\r':
372 395 char = '\n'
373 396 elif not event.ShiftDown():
374 397 char = char.lower()
375 398 if event.ControlDown() and event.KeyCode in (ord('d'), ord('D')):
376 399 char = '\04'
377 400 self._running_process.process.stdin.write(char)
378 401 self._running_process.process.stdin.flush()
379 elif event.KeyCode in (ord('('), 57):
402 elif event.KeyCode in (ord('('), 57, 53):
380 403 # Calltips
381 404 event.Skip()
382 405 self.do_calltip()
383 406 elif self.AutoCompActive() and not event.KeyCode == ord('\t'):
384 407 event.Skip()
385 408 if event.KeyCode in (wx.WXK_BACK, wx.WXK_DELETE):
386 409 wx.CallAfter(self._popup_completion, create=True)
387 410 elif not event.KeyCode in (wx.WXK_UP, wx.WXK_DOWN, wx.WXK_LEFT,
388 411 wx.WXK_RIGHT, wx.WXK_ESCAPE):
389 412 wx.CallAfter(self._popup_completion)
390 413 else:
391 414 # Up history
392 415 if event.KeyCode == wx.WXK_UP and (
393 416 ( current_line_number == self.current_prompt_line and
394 417 event.Modifiers in (wx.MOD_NONE, wx.MOD_WIN) )
395 418 or event.ControlDown() ):
396 419 new_buffer = self.get_history_previous(
397 420 self.input_buffer)
398 421 if new_buffer is not None:
399 422 self.input_buffer = new_buffer
400 423 if self.GetCurrentLine() > self.current_prompt_line:
401 424 # Go to first line, for seamless history up.
402 425 self.GotoPos(self.current_prompt_pos)
403 426 # Down history
404 427 elif event.KeyCode == wx.WXK_DOWN and (
405 428 ( current_line_number == self.LineCount -1 and
406 429 event.Modifiers in (wx.MOD_NONE, wx.MOD_WIN) )
407 430 or event.ControlDown() ):
408 431 new_buffer = self.get_history_next()
409 432 if new_buffer is not None:
410 433 self.input_buffer = new_buffer
411 434 # Tab-completion
412 435 elif event.KeyCode == ord('\t'):
413 last_line = self.input_buffer.split('\n')[-1]
414 if not re.match(r'^\s*$', last_line):
436 current_line, current_line_number = self.CurLine
437 if not re.match(r'^\s*$', current_line):
415 438 self.complete_current_input()
416 439 if self.AutoCompActive():
417 440 wx.CallAfter(self._popup_completion, create=True)
418 441 else:
419 442 event.Skip()
420 443 else:
421 444 ConsoleWidget._on_key_down(self, event, skip=skip)
422 445
423 446
424 447 def _on_key_up(self, event, skip=True):
425 448 """ Called when any key is released.
426 449 """
427 450 if event.KeyCode in (59, ord('.')):
428 451 # Intercepting '.'
429 452 event.Skip()
430 self._popup_completion(create=True)
453 wx.CallAfter(self._popup_completion, create=True)
431 454 else:
432 455 ConsoleWidget._on_key_up(self, event, skip=skip)
433 456
434 457
435 458 def _on_enter(self):
436 459 """ Called on return key down, in readline input_state.
437 460 """
438 461 if self.debug:
439 462 print >>sys.__stdout__, repr(self.input_buffer)
440 463 PrefilterFrontEnd._on_enter(self)
441 464
442 465
443 466 #--------------------------------------------------------------------------
444 467 # EditWindow API
445 468 #--------------------------------------------------------------------------
446 469
447 470 def OnUpdateUI(self, event):
448 471 """ Override the OnUpdateUI of the EditWindow class, to prevent
449 472 syntax highlighting, both for faster redraw and for a more
450 473 consistent look and feel.
451 474 """
452 475 if not self._input_state == 'readline':
453 476 ConsoleWidget.OnUpdateUI(self, event)
454 477
455 478 #--------------------------------------------------------------------------
456 479 # Private API
457 480 #--------------------------------------------------------------------------
458 481
459 def _end_system_call(self):
460 """ Called at the end of a system call.
461 """
462 self._input_state = 'buffering'
463 self._running_process = False
464
465
466 482 def _buffer_flush(self, event):
467 483 """ Called by the timer to flush the write buffer.
468 484
469 485 This is always called in the mainloop, by the wx timer.
470 486 """
471 487 self._out_buffer_lock.acquire()
472 488 _out_buffer = self._out_buffer
473 489 self._out_buffer = []
474 490 self._out_buffer_lock.release()
475 491 self.write(''.join(_out_buffer), refresh=False)
476 492
477 493
478 494 def _colorize_input_buffer(self):
479 495 """ Keep the input buffer lines at a bright color.
480 496 """
481 497 if not self._input_state in ('readline', 'raw_input'):
482 498 return
483 499 end_line = self.GetCurrentLine()
484 500 if not sys.platform == 'win32':
485 501 end_line += 1
486 502 for i in range(self.current_prompt_line, end_line):
487 503 if i in self._markers:
488 504 self.MarkerDeleteHandle(self._markers[i])
489 505 self._markers[i] = self.MarkerAdd(i, _INPUT_MARKER)
490 506
491 507
492 508 if __name__ == '__main__':
493 509 class MainWindow(wx.Frame):
494 510 def __init__(self, parent, id, title):
495 511 wx.Frame.__init__(self, parent, id, title, size=(300,250))
496 512 self._sizer = wx.BoxSizer(wx.VERTICAL)
497 513 self.shell = WxController(self)
498 514 self._sizer.Add(self.shell, 1, wx.EXPAND)
499 515 self.SetSizer(self._sizer)
500 516 self.SetAutoLayout(1)
501 517 self.Show(True)
502 518
503 519 app = wx.PySimpleApp()
504 520 frame = MainWindow(None, wx.ID_ANY, 'Ipython')
505 521 frame.shell.SetFocus()
506 522 frame.SetSize((680, 460))
507 523 self = frame.shell
508 524
509 525 app.MainLoop()
510 526
@@ -1,34 +1,27
1 1 # encoding: utf-8
2 2 # -*- test-case-name: IPython.frontend.tests.test_frontendbase -*-
3 3 """
4 4 zope.interface mock. If zope is installed, this module provides the zope
5 5 interface classes; if not, it provides mocks for them.
6 6
7 7 Classes provided:
8 8 Interface, Attribute, implements, classProvides
9 9 """
10 10 __docformat__ = "restructuredtext en"
11 11
12 12 #-------------------------------------------------------------------------------
13 13 # Copyright (C) 2008 The IPython Development Team
14 14 #
15 15 # Distributed under the terms of the BSD License. The full license is in
16 16 # the file COPYING, distributed as part of this software.
17 17 #-------------------------------------------------------------------------------
18 18
19 #-------------------------------------------------------------------------------
20 # Imports
21 #-------------------------------------------------------------------------------
22 import string
23 import uuid
24 import _ast
25
26 19 try:
27 20 from zope.interface import Interface, Attribute, implements, classProvides
28 21 except ImportError:
29 22 #zope.interface is not available
30 23 Interface = object
31 24 def Attribute(name, doc): pass
32 25 def implements(interface): pass
33 26 def classProvides(interface): pass
34 27
@@ -1,2132 +1,2164
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 General purpose utilities.
4 4
5 5 This is a grab-bag of stuff I find useful in most programs I write. Some of
6 6 these things are also convenient when working at the command line.
7 7
8 8 $Id: genutils.py 2998 2008-01-31 10:06:04Z vivainio $"""
9 9
10 10 #*****************************************************************************
11 11 # Copyright (C) 2001-2006 Fernando Perez. <fperez@colorado.edu>
12 12 #
13 13 # Distributed under the terms of the BSD License. The full license is in
14 14 # the file COPYING, distributed as part of this software.
15 15 #*****************************************************************************
16 16
17 17 from IPython import Release
18 18 __author__ = '%s <%s>' % Release.authors['Fernando']
19 19 __license__ = Release.license
20 20
21 21 #****************************************************************************
22 22 # required modules from the Python standard library
23 23 import __main__
24 24 import commands
25 25 try:
26 26 import doctest
27 27 except ImportError:
28 28 pass
29 29 import os
30 30 import platform
31 31 import re
32 32 import shlex
33 33 import shutil
34 34 import subprocess
35 35 import sys
36 36 import tempfile
37 37 import time
38 38 import types
39 39 import warnings
40 40
41 41 # Curses and termios are Unix-only modules
42 42 try:
43 43 import curses
44 44 # We need termios as well, so if its import happens to raise, we bail on
45 45 # using curses altogether.
46 46 import termios
47 47 except ImportError:
48 48 USE_CURSES = False
49 49 else:
50 50 # Curses on Solaris may not be complete, so we can't use it there
51 51 USE_CURSES = hasattr(curses,'initscr')
52 52
53 53 # Other IPython utilities
54 54 import IPython
55 55 from IPython.Itpl import Itpl,itpl,printpl
56 56 from IPython import DPyGetOpt, platutils
57 57 from IPython.generics import result_display
58 58 import IPython.ipapi
59 59 from IPython.external.path import path
60 60 if os.name == "nt":
61 61 from IPython.winconsole import get_console_size
62 62
63 63 try:
64 64 set
65 65 except:
66 66 from sets import Set as set
67 67
68 68
69 69 #****************************************************************************
70 70 # Exceptions
71 71 class Error(Exception):
72 72 """Base class for exceptions in this module."""
73 73 pass
74 74
75 75 #----------------------------------------------------------------------------
76 76 class IOStream:
77 77 def __init__(self,stream,fallback):
78 78 if not hasattr(stream,'write') or not hasattr(stream,'flush'):
79 79 stream = fallback
80 80 self.stream = stream
81 81 self._swrite = stream.write
82 82 self.flush = stream.flush
83 83
84 84 def write(self,data):
85 85 try:
86 86 self._swrite(data)
87 87 except:
88 88 try:
89 89 # print handles some unicode issues which may trip a plain
90 90 # write() call. Attempt to emulate write() by using a
91 91 # trailing comma
92 92 print >> self.stream, data,
93 93 except:
94 94 # if we get here, something is seriously broken.
95 95 print >> sys.stderr, \
96 96 'ERROR - failed to write data to stream:', self.stream
97 97
98 98 def close(self):
99 99 pass
100 100
101 101
102 102 class IOTerm:
103 103 """ Term holds the file or file-like objects for handling I/O operations.
104 104
105 105 These are normally just sys.stdin, sys.stdout and sys.stderr but for
106 106 Windows they can be replaced to allow editing the strings before they are
107 107 displayed."""
108 108
109 109 # In the future, having IPython channel all its I/O operations through
110 110 # this class will make it easier to embed it into other environments which
111 111 # are not a normal terminal (such as a GUI-based shell)
112 112 def __init__(self,cin=None,cout=None,cerr=None):
113 113 self.cin = IOStream(cin,sys.stdin)
114 114 self.cout = IOStream(cout,sys.stdout)
115 115 self.cerr = IOStream(cerr,sys.stderr)
116 116
117 117 # Global variable to be used for all I/O
118 118 Term = IOTerm()
119 119
120 120 import IPython.rlineimpl as readline
121 121 # Remake Term to use the readline i/o facilities
122 122 if sys.platform == 'win32' and readline.have_readline:
123 123
124 124 Term = IOTerm(cout=readline._outputfile,cerr=readline._outputfile)
125 125
126 126
127 127 #****************************************************************************
128 128 # Generic warning/error printer, used by everything else
129 129 def warn(msg,level=2,exit_val=1):
130 130 """Standard warning printer. Gives formatting consistency.
131 131
132 132 Output is sent to Term.cerr (sys.stderr by default).
133 133
134 134 Options:
135 135
136 136 -level(2): allows finer control:
137 137 0 -> Do nothing, dummy function.
138 138 1 -> Print message.
139 139 2 -> Print 'WARNING:' + message. (Default level).
140 140 3 -> Print 'ERROR:' + message.
141 141 4 -> Print 'FATAL ERROR:' + message and trigger a sys.exit(exit_val).
142 142
143 143 -exit_val (1): exit value returned by sys.exit() for a level 4
144 144 warning. Ignored for all other levels."""
145 145
146 146 if level>0:
147 147 header = ['','','WARNING: ','ERROR: ','FATAL ERROR: ']
148 148 print >> Term.cerr, '%s%s' % (header[level],msg)
149 149 if level == 4:
150 150 print >> Term.cerr,'Exiting.\n'
151 151 sys.exit(exit_val)
152 152
153 153 def info(msg):
154 154 """Equivalent to warn(msg,level=1)."""
155 155
156 156 warn(msg,level=1)
157 157
158 158 def error(msg):
159 159 """Equivalent to warn(msg,level=3)."""
160 160
161 161 warn(msg,level=3)
162 162
163 163 def fatal(msg,exit_val=1):
164 164 """Equivalent to warn(msg,exit_val=exit_val,level=4)."""
165 165
166 166 warn(msg,exit_val=exit_val,level=4)
167 167
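
A quick usage sketch of the warning helpers defined above, assuming the module is importable as IPython.genutils (as it is in this tree); the messages are made up:

    from IPython.genutils import warn, info, error

    info('loading configuration')              # level 1: plain message
    warn('configuration file not found')       # level 2: prefixed with 'WARNING: '
    error('profile directory is unreadable')   # level 3: prefixed with 'ERROR: '
    # fatal('cannot continue')                 # level 4: would also sys.exit(1)
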
168 168 #---------------------------------------------------------------------------
169 169 # Debugging routines
170 170 #
171 171 def debugx(expr,pre_msg=''):
172 172 """Print the value of an expression from the caller's frame.
173 173
174 174 Takes an expression, evaluates it in the caller's frame and prints both
175 175 the given expression and the resulting value (as well as a debug mark
176 176 indicating the name of the calling function). The input must be of a form
177 177 suitable for eval().
178 178
179 179 An optional message can be passed, which will be prepended to the printed
180 180 expr->value pair."""
181 181
182 182 cf = sys._getframe(1)
183 183 print '[DBG:%s] %s%s -> %r' % (cf.f_code.co_name,pre_msg,expr,
184 184 eval(expr,cf.f_globals,cf.f_locals))
185 185
186 186 # deactivate it by uncommenting the following line, which makes it a no-op
187 187 #def debugx(expr,pre_msg=''): pass
188 188
189 189 #----------------------------------------------------------------------------
190 190 StringTypes = types.StringTypes
191 191
192 192 # Basic timing functionality
193 193
194 194 # If possible (Unix), use the resource module instead of time.clock()
195 195 try:
196 196 import resource
197 197 def clocku():
198 198 """clocku() -> floating point number
199 199
200 200 Return the *USER* CPU time in seconds since the start of the process.
201 201 This is done via a call to resource.getrusage, so it avoids the
202 202 wraparound problems in time.clock()."""
203 203
204 204 return resource.getrusage(resource.RUSAGE_SELF)[0]
205 205
206 206 def clocks():
207 207 """clocks() -> floating point number
208 208
209 209 Return the *SYSTEM* CPU time in seconds since the start of the process.
210 210 This is done via a call to resource.getrusage, so it avoids the
211 211 wraparound problems in time.clock()."""
212 212
213 213 return resource.getrusage(resource.RUSAGE_SELF)[1]
214 214
215 215 def clock():
216 216 """clock() -> floating point number
217 217
218 218 Return the *TOTAL USER+SYSTEM* CPU time in seconds since the start of
219 219 the process. This is done via a call to resource.getrusage, so it
220 220 avoids the wraparound problems in time.clock()."""
221 221
222 222 u,s = resource.getrusage(resource.RUSAGE_SELF)[:2]
223 223 return u+s
224 224
225 225 def clock2():
226 226 """clock2() -> (t_user,t_system)
227 227
228 228 Similar to clock(), but return a tuple of user/system times."""
229 229 return resource.getrusage(resource.RUSAGE_SELF)[:2]
230 230
231 231 except ImportError:
232 232 # There is no distinction of user/system time under windows, so we just use
233 233 # time.clock() for everything...
234 234 clocku = clocks = clock = time.clock
235 235 def clock2():
236 236 """Under windows, system CPU time can't be measured.
237 237
238 238 This just returns clock() and zero."""
239 239 return time.clock(),0.0
240 240
241 241 def timings_out(reps,func,*args,**kw):
242 242 """timings_out(reps,func,*args,**kw) -> (t_total,t_per_call,output)
243 243
244 244 Execute a function reps times, return a tuple with the elapsed total
245 245 CPU time in seconds, the time per call and the function's output.
246 246
247 247 Under Unix, the return value is the sum of user+system time consumed by
248 248 the process, computed via the resource module. This prevents problems
249 249 related to the wraparound effect which the time.clock() function has.
250 250
251 251 Under Windows the return value is in wall clock seconds. See the
252 252 documentation for the time module for more details."""
253 253
254 254 reps = int(reps)
255 255 assert reps >=1, 'reps must be >= 1'
256 256 if reps==1:
257 257 start = clock()
258 258 out = func(*args,**kw)
259 259 tot_time = clock()-start
260 260 else:
261 261 rng = xrange(reps-1) # the last time is executed separately to store output
262 262 start = clock()
263 263 for dummy in rng: func(*args,**kw)
264 264 out = func(*args,**kw) # one last time
265 265 tot_time = clock()-start
266 266 av_time = tot_time / reps
267 267 return tot_time,av_time,out
268 268
269 269 def timings(reps,func,*args,**kw):
270 270 """timings(reps,func,*args,**kw) -> (t_total,t_per_call)
271 271
272 272 Execute a function reps times, return a tuple with the elapsed total CPU
273 273 time in seconds and the time per call. These are just the first two values
274 274 in timings_out()."""
275 275
276 276 return timings_out(reps,func,*args,**kw)[0:2]
277 277
278 278 def timing(func,*args,**kw):
279 279 """timing(func,*args,**kw) -> t_total
280 280
281 281 Execute a function once, return the elapsed total CPU time in
282 282 seconds. This is just the first value in timings_out()."""
283 283
284 284 return timings_out(1,func,*args,**kw)[0]
285 285
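
For reference, a short usage sketch of the three timing helpers above (the commented result is illustrative; actual times depend on the machine):

    from IPython.genutils import timing, timings, timings_out

    tot, per_call = timings(100, sum, range(10000))        # total and per-call CPU time
    tot_once = timing(sum, range(10000))                   # single run
    tot_all, per_call2, result = timings_out(10, sorted, [3, 1, 2])
    print(result)                                          # [1, 2, 3]
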
286 286 #****************************************************************************
287 287 # file and system
288 288
289 289 def arg_split(s,posix=False):
290 290 """Split a command line's arguments in a shell-like manner.
291 291
292 292 This is a modified version of the standard library's shlex.split()
293 293 function, but with a default of posix=False for splitting, so that quotes
294 294 in inputs are respected."""
295 295
296 296 # XXX - there may be unicode-related problems here!!! I'm not sure that
297 297 # shlex is truly unicode-safe, so it might be necessary to do
298 298 #
299 299 # s = s.encode(sys.stdin.encoding)
300 300 #
301 301 # first, to ensure that shlex gets a normal string. Input from anyone who
302 302 # knows more about unicode and shlex than I would be good to have here...
303 303 lex = shlex.shlex(s, posix=posix)
304 304 lex.whitespace_split = True
305 305 return list(lex)
306 306
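
The effect of the posix=False default is easiest to see side by side: with it, the quotes survive into the tokens. A hedged illustration (the command string is arbitrary, and the commented outputs are what shlex is expected to produce):

    from IPython.genutils import arg_split

    print(arg_split('grep "hello world" *.txt'))
    # expected: ['grep', '"hello world"', '*.txt']   (quotes kept, default posix=False)

    print(arg_split('grep "hello world" *.txt', posix=True))
    # expected: ['grep', 'hello world', '*.txt']     (quotes stripped, POSIX rules)
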
307 307 def system(cmd,verbose=0,debug=0,header=''):
308 308 """Execute a system command, return its exit status.
309 309
310 310 Options:
311 311
312 312 - verbose (0): print the command to be executed.
313 313
314 314 - debug (0): only print, do not actually execute.
315 315
316 316 - header (''): Header to print on screen prior to the executed command (it
317 317 is only prepended to the command, no newlines are added).
318 318
319 319 Note: a stateful version of this function is available through the
320 320 SystemExec class."""
321 321
322 322 stat = 0
323 323 if verbose or debug: print header+cmd
324 324 sys.stdout.flush()
325 325 if not debug: stat = os.system(cmd)
326 326 return stat
327 327
328 328 def abbrev_cwd():
329 329 """ Return abbreviated version of cwd, e.g. d:mydir """
330 330 cwd = os.getcwd().replace('\\','/')
331 331 drivepart = ''
332 332 tail = cwd
333 333 if sys.platform == 'win32':
334 334 if len(cwd) < 4:
335 335 return cwd
336 336 drivepart,tail = os.path.splitdrive(cwd)
337 337
338 338
339 339 parts = tail.split('/')
340 340 if len(parts) > 2:
341 341 tail = '/'.join(parts[-2:])
342 342
343 343 return (drivepart + (
344 344 cwd == '/' and '/' or tail))
345 345
346 346
347 347 # This function is used by ipython in a lot of places to make system calls.
348 348 # We need it to be slightly different under win32, due to the vagaries of
349 349 # 'network shares'. A win32 override is below.
350 350
351 351 def shell(cmd,verbose=0,debug=0,header=''):
352 352 """Execute a command in the system shell, always return None.
353 353
354 354 Options:
355 355
356 356 - verbose (0): print the command to be executed.
357 357
358 358 - debug (0): only print, do not actually execute.
359 359
360 360 - header (''): Header to print on screen prior to the executed command (it
361 361 is only prepended to the command, no newlines are added).
362 362
363 363 Note: this is similar to genutils.system(), but it returns None so it can
364 364 be conveniently used in interactive loops without getting the return value
365 365 (typically 0) printed many times."""
366 366
367 367 stat = 0
368 368 if verbose or debug: print header+cmd
369 369 # flush stdout so we don't mangle python's buffering
370 370 sys.stdout.flush()
371 371
372 372 if not debug:
373 373 platutils.set_term_title("IPy " + cmd)
374 374 os.system(cmd)
375 375 platutils.set_term_title("IPy " + abbrev_cwd())
376 376
377 377 # override shell() for win32 to deal with network shares
378 378 if os.name in ('nt','dos'):
379 379
380 380 shell_ori = shell
381 381
382 382 def shell(cmd,verbose=0,debug=0,header=''):
383 383 if os.getcwd().startswith(r"\\"):
384 384 path = os.getcwd()
385 385 # change to c drive (cannot be on UNC-share when issuing os.system,
386 386 # as cmd.exe cannot handle UNC addresses)
387 387 os.chdir("c:")
388 388 # issue pushd to the UNC-share and then run the command
389 389 try:
390 390 shell_ori('"pushd %s&&"'%path+cmd,verbose,debug,header)
391 391 finally:
392 392 os.chdir(path)
393 393 else:
394 394 shell_ori(cmd,verbose,debug,header)
395 395
396 396 shell.__doc__ = shell_ori.__doc__
397 397
398 398 def getoutput(cmd,verbose=0,debug=0,header='',split=0):
399 399 """Dummy substitute for perl's backquotes.
400 400
401 401 Executes a command and returns the output.
402 402
403 403 Accepts the same arguments as system(), plus:
404 404
405 405 - split(0): if true, the output is returned as a list split on newlines.
406 406
407 407 Note: a stateful version of this function is available through the
408 408 SystemExec class.
409 409
410 410 This is pretty much deprecated and rarely used;
411 411 genutils.getoutputerror may be what you need.
412 412
413 413 """
414 414
415 415 if verbose or debug: print header+cmd
416 416 if not debug:
417 417 output = os.popen(cmd).read()
418 418 # stipping last \n is here for backwards compat.
419 419 if output.endswith('\n'):
420 420 output = output[:-1]
421 421 if split:
422 422 return output.split('\n')
423 423 else:
424 424 return output
425 425
426 426 def getoutputerror(cmd,verbose=0,debug=0,header='',split=0):
427 427 """Return (standard output,standard error) of executing cmd in a shell.
428 428
429 429 Accepts the same arguments as system(), plus:
430 430
431 431 - split(0): if true, each of stdout/err is returned as a list split on
432 432 newlines.
433 433
434 434 Note: a stateful version of this function is available through the
435 435 SystemExec class."""
436 436
437 437 if verbose or debug: print header+cmd
438 438 if not cmd:
439 439 if split:
440 440 return [],[]
441 441 else:
442 442 return '',''
443 443 if not debug:
444 444 pin,pout,perr = os.popen3(cmd)
445 445 tout = pout.read().rstrip()
446 446 terr = perr.read().rstrip()
447 447 pin.close()
448 448 pout.close()
449 449 perr.close()
450 450 if split:
451 451 return tout.split('\n'),terr.split('\n')
452 452 else:
453 453 return tout,terr
454 454
455 455 # for compatibility with older naming conventions
456 456 xsys = system
457 457 bq = getoutput
458 458
459 459 class SystemExec:
460 460 """Access the system and getoutput functions through a stateful interface.
461 461
462 462 Note: here we refer to the system and getoutput functions from this
463 463 library, not the ones from the standard python library.
464 464
465 465 This class offers the system and getoutput functions as methods, but the
466 466 verbose, debug and header parameters can be set for the instance (at
467 467 creation time or later) so that they don't need to be specified on each
468 468 call.
469 469
470 470 For efficiency reasons, there's no way to override the parameters on a
471 471 per-call basis other than by setting instance attributes. If you need
472 472 local overrides, it's best to directly call system() or getoutput().
473 473
474 474 The following names are provided as alternate options:
475 475 - xsys: alias to system
476 476 - bq: alias to getoutput
477 477
478 478 An instance can then be created as:
479 479 >>> sysexec = SystemExec(verbose=1,debug=0,header='Calling: ')
480 480 """
481 481
482 482 def __init__(self,verbose=0,debug=0,header='',split=0):
483 483 """Specify the instance's values for verbose, debug and header."""
484 484 setattr_list(self,'verbose debug header split')
485 485
486 486 def system(self,cmd):
487 487 """Stateful interface to system(), with the same keyword parameters."""
488 488
489 489 system(cmd,self.verbose,self.debug,self.header)
490 490
491 491 def shell(self,cmd):
492 492 """Stateful interface to shell(), with the same keyword parameters."""
493 493
494 494 shell(cmd,self.verbose,self.debug,self.header)
495 495
496 496 xsys = system # alias
497 497
498 498 def getoutput(self,cmd):
499 499 """Stateful interface to getoutput()."""
500 500
501 501 return getoutput(cmd,self.verbose,self.debug,self.header,self.split)
502 502
503 503 def getoutputerror(self,cmd):
504 504 """Stateful interface to getoutputerror()."""
505 505
506 506 return getoutputerror(cmd,self.verbose,self.debug,self.header,self.split)
507 507
508 508 bq = getoutput # alias
509 509
510 510 #-----------------------------------------------------------------------------
511 511 def mutex_opts(dict,ex_op):
512 512 """Check for presence of mutually exclusive keys in a dict.
513 513
514 514 Call: mutex_opts(dict,[[op1a,op1b],[op2a,op2b],...])
515 515 for op1,op2 in ex_op:
516 516 if op1 in dict and op2 in dict:
517 517 raise ValueError,'\n*** ERROR in Arguments *** '\
518 518 'Options '+op1+' and '+op2+' are mutually exclusive.'
519 519
520 520 #-----------------------------------------------------------------------------
521 521 def get_py_filename(name):
522 522 """Return a valid python filename in the current directory.
523 523
524 524 If the given name is not a file, it adds '.py' and searches again.
525 525 Raises IOError with an informative message if the file isn't found."""
526 526
527 527 name = os.path.expanduser(name)
528 528 if not os.path.isfile(name) and not name.endswith('.py'):
529 529 name += '.py'
530 530 if os.path.isfile(name):
531 531 return name
532 532 else:
533 533 raise IOError,'File `%s` not found.' % name
534 534
535 535 #-----------------------------------------------------------------------------
536 536 def filefind(fname,alt_dirs = None):
537 537 """Return the given filename either in the current directory, if it
538 538 exists, or in a specified list of directories.
539 539
540 540 ~ expansion is done on all file and directory names.
541 541
542 542 Upon an unsuccessful search, raise an IOError exception."""
543 543
544 544 if alt_dirs is None:
545 545 try:
546 546 alt_dirs = get_home_dir()
547 547 except HomeDirError:
548 548 alt_dirs = os.getcwd()
549 549 search = [fname] + list_strings(alt_dirs)
550 550 search = map(os.path.expanduser,search)
551 551 #print 'search list for',fname,'list:',search # dbg
552 552 fname = search[0]
553 553 if os.path.isfile(fname):
554 554 return fname
555 555 for direc in search[1:]:
556 556 testname = os.path.join(direc,fname)
557 557 #print 'testname',testname # dbg
558 558 if os.path.isfile(testname):
559 559 return testname
560 560 raise IOError,'File' + `fname` + \
561 561 ' not found in current or supplied directories:' + `alt_dirs`
562 562
563 563 #----------------------------------------------------------------------------
564 564 def file_read(filename):
565 565 """Read a file and close it. Returns the file source."""
566 566 fobj = open(filename,'r');
567 567 source = fobj.read();
568 568 fobj.close()
569 569 return source
570 570
571 571 def file_readlines(filename):
572 572 """Read a file and close it. Returns the file source using readlines()."""
573 573 fobj = open(filename,'r');
574 574 lines = fobj.readlines();
575 575 fobj.close()
576 576 return lines
577 577
578 578 #----------------------------------------------------------------------------
579 579 def target_outdated(target,deps):
580 580 """Determine whether a target is out of date.
581 581
582 582 target_outdated(target,deps) -> 1/0
583 583
584 584 deps: list of filenames which MUST exist.
585 585 target: single filename which may or may not exist.
586 586
587 587 If target doesn't exist or is older than any file listed in deps, return
588 588 true, otherwise return false.
589 589 """
590 590 try:
591 591 target_time = os.path.getmtime(target)
592 592 except os.error:
593 593 return 1
594 594 for dep in deps:
595 595 dep_time = os.path.getmtime(dep)
596 596 if dep_time > target_time:
597 597 #print "For target",target,"Dep failed:",dep # dbg
598 598 #print "times (dep,tar):",dep_time,target_time # dbg
599 599 return 1
600 600 return 0
601 601
602 602 #-----------------------------------------------------------------------------
603 603 def target_update(target,deps,cmd):
604 604 """Update a target with a given command given a list of dependencies.
605 605
606 606 target_update(target,deps,cmd) -> runs cmd if target is outdated.
607 607
608 608 This is just a wrapper around target_outdated() which calls the given
609 609 command if target is outdated."""
610 610
611 611 if target_outdated(target,deps):
612 612 xsys(cmd)
613 613
614 614 #----------------------------------------------------------------------------
615 615 def unquote_ends(istr):
616 616 """Remove a single pair of quotes from the endpoints of a string."""
617 617
618 618 if not istr:
619 619 return istr
620 620 if (istr[0]=="'" and istr[-1]=="'") or \
621 621 (istr[0]=='"' and istr[-1]=='"'):
622 622 return istr[1:-1]
623 623 else:
624 624 return istr
625 625
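As a quick illustration of the quote stripping above (only a single matching pair of end quotes is removed):

    >>> unquote_ends('"hello"')
    'hello'
    >>> unquote_ends('no quotes')
    'no quotes'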
626 626 #----------------------------------------------------------------------------
627 627 def process_cmdline(argv,names=[],defaults={},usage=''):
628 628 """ Process command-line options and arguments.
629 629
630 630 Arguments:
631 631
632 632 - argv: list of arguments, typically sys.argv.
633 633
634 634 - names: list of option names. See DPyGetOpt docs for details on options
635 635 syntax.
636 636
637 637 - defaults: dict of default values.
638 638
639 639 - usage: optional usage notice to print if a wrong argument is passed.
640 640
641 641 Return a dict of options and a list of free arguments."""
642 642
643 643 getopt = DPyGetOpt.DPyGetOpt()
644 644 getopt.setIgnoreCase(0)
645 645 getopt.parseConfiguration(names)
646 646
647 647 try:
648 648 getopt.processArguments(argv)
649 649 except DPyGetOpt.ArgumentError, exc:
650 650 print usage
651 651 warn('"%s"' % exc,level=4)
652 652
653 653 defaults.update(getopt.optionValues)
654 654 args = getopt.freeValues
655 655
656 656 return defaults,args
657 657
658 658 #----------------------------------------------------------------------------
659 659 def optstr2types(ostr):
660 660 """Convert a string of option names to a dict of type mappings.
661 661
662 662 optstr2types(str) -> {None:'string_opts',int:'int_opts',float:'float_opts'}
663 663
664 664 This is used to get the types of all the options in a string formatted
665 665 with the conventions of DPyGetOpt. The 'type' None is used for options
666 666 which are strings (they need no further conversion). This function's main
667 667 use is to get a typemap for use with read_dict().
668 668 """
669 669
670 670 typeconv = {None:'',int:'',float:''}
671 671 typemap = {'s':None,'i':int,'f':float}
672 672 opt_re = re.compile(r'([\w]*)([^:=]*:?=?)([sif]?)')
673 673
674 674 for w in ostr.split():
675 675 oname,alias,otype = opt_re.match(w).groups()
676 676 if otype == '' or alias == '!': # simple switches are integers too
677 677 otype = 'i'
678 678 typeconv[typemap[otype]] += oname + ' '
679 679 return typeconv
680 680
681 681 #----------------------------------------------------------------------------
682 682 def read_dict(filename,type_conv=None,**opt):
683 683 r"""Read a dictionary of key=value pairs from an input file, optionally
684 684 performing conversions on the resulting values.
685 685
686 686 read_dict(filename,type_conv,**opt) -> dict
687 687
688 688 Only one value per line is accepted; the format should be
689 689 # optional comments are ignored
690 690 key value\n
691 691
692 692 Args:
693 693
694 694 - type_conv: A dictionary specifying which keys need to be converted to
695 695 which types. By default all keys are read as strings. This dictionary
696 696 should have as its keys valid conversion functions for strings
697 697 (int,long,float,complex, or your own). The value for each key
698 698 (converter) should be a whitespace separated string containing the names
699 699 of all the entries in the file to be converted using that function. For
700 700 keys to be left alone, use None as the conversion function (only needed
701 701 with purge=1, see below).
702 702
703 703 - opt: dictionary with extra options as below (default in parens)
704 704
705 705 purge(0): if set to 1, all keys *not* listed in type_conv are purged out
706 706 of the dictionary to be returned. If purge is going to be used, the
707 707 set of keys to be left as strings also has to be explicitly specified
708 708 using the (non-existent) conversion function None.
709 709
710 710 fs(None): field separator. This is the key/value separator to be used
711 711 when parsing the file. The None default means any whitespace [behavior
712 712 of string.split()].
713 713
714 714 strip(0): if 1, strip string values of leading/trailing whitespace.
715 715
716 716 warn(1): warning level if requested keys are not found in file.
717 717 - 0: silently ignore.
718 718 - 1: inform but proceed.
719 719 - 2: raise KeyError exception.
720 720
721 721 no_empty(0): if 1, remove keys with whitespace strings as a value.
722 722
723 723 unique([]): list of keys (or space separated string) which can't be
724 724 repeated. If one such key is found in the file, each new instance
725 725 overwrites the previous one. For keys not listed here, the behavior is
726 726 to make a list of all appearances.
727 727
728 728 Example:
729 729
730 730 If the input file test.ini contains (we put it in a string to keep the test
731 731 self-contained):
732 732
733 733 >>> test_ini = '''\
734 734 ... i 3
735 735 ... x 4.5
736 736 ... y 5.5
737 737 ... s hi ho'''
738 738
739 739 Then we can use it as follows:
740 740 >>> type_conv={int:'i',float:'x',None:'s'}
741 741
742 742 >>> d = read_dict(test_ini)
743 743
744 744 >>> sorted(d.items())
745 745 [('i', '3'), ('s', 'hi ho'), ('x', '4.5'), ('y', '5.5')]
746 746
747 747 >>> d = read_dict(test_ini,type_conv)
748 748
749 749 >>> sorted(d.items())
750 750 [('i', 3), ('s', 'hi ho'), ('x', 4.5), ('y', '5.5')]
751 751
752 752 >>> d = read_dict(test_ini,type_conv,purge=True)
753 753
754 754 >>> sorted(d.items())
755 755 [('i', 3), ('s', 'hi ho'), ('x', 4.5)]
756 756 """
757 757
758 758 # starting config
759 759 opt.setdefault('purge',0)
760 760 opt.setdefault('fs',None) # field sep defaults to any whitespace
761 761 opt.setdefault('strip',0)
762 762 opt.setdefault('warn',1)
763 763 opt.setdefault('no_empty',0)
764 764 opt.setdefault('unique','')
765 765 if type(opt['unique']) in StringTypes:
766 766 unique_keys = qw(opt['unique'])
767 767 elif type(opt['unique']) in (types.TupleType,types.ListType):
768 768 unique_keys = opt['unique']
769 769 else:
770 770 raise ValueError, 'Unique keys must be given as a string, List or Tuple'
771 771
772 772 dict = {}
773 773
774 774 # first read in table of values as strings
775 775 if '\n' in filename:
776 776 lines = filename.splitlines()
777 777 file = None
778 778 else:
779 779 file = open(filename,'r')
780 780 lines = file.readlines()
781 781 for line in lines:
782 782 line = line.strip()
783 783 if len(line) and line[0]=='#': continue
784 784 if len(line)>0:
785 785 lsplit = line.split(opt['fs'],1)
786 786 try:
787 787 key,val = lsplit
788 788 except ValueError:
789 789 key,val = lsplit[0],''
790 790 key = key.strip()
791 791 if opt['strip']: val = val.strip()
792 792 if val == "''" or val == '""': val = ''
793 793 if opt['no_empty'] and (val=='' or val.isspace()):
794 794 continue
795 795 # if a key is found more than once in the file, build a list
796 796 # unless it's in the 'unique' list. In that case, last found in file
797 797 # takes precedence. User beware.
798 798 try:
799 799 if dict[key] and key in unique_keys:
800 800 dict[key] = val
801 801 elif type(dict[key]) is types.ListType:
802 802 dict[key].append(val)
803 803 else:
804 804 dict[key] = [dict[key],val]
805 805 except KeyError:
806 806 dict[key] = val
807 807 # purge if requested
808 808 if opt['purge']:
809 809 accepted_keys = qwflat(type_conv.values())
810 810 for key in dict.keys():
811 811 if key in accepted_keys: continue
812 812 del(dict[key])
813 813 # now convert if requested
814 814 if type_conv is None: return dict
815 815 conversions = type_conv.keys()
816 816 try: conversions.remove(None)
817 817 except: pass
818 818 for convert in conversions:
819 819 for val in qw(type_conv[convert]):
820 820 try:
821 821 dict[val] = convert(dict[val])
822 822 except KeyError,e:
823 823 if opt['warn'] == 0:
824 824 pass
825 825 elif opt['warn'] == 1:
826 826 print >>sys.stderr, 'Warning: key',val,\
827 827 'not found in file',filename
828 828 elif opt['warn'] == 2:
829 829 raise KeyError,e
830 830 else:
831 831 raise ValueError,'Warning level must be 0,1 or 2'
832 832
833 833 return dict
834 834
835 835 #----------------------------------------------------------------------------
836 836 def flag_calls(func):
837 837 """Wrap a function to detect and flag when it gets called.
838 838
839 839 This is a decorator which takes a function and wraps it in a function with
840 840 a 'called' attribute. wrapper.called is initialized to False.
841 841
842 842 The wrapper.called attribute is set to False right before each call to the
843 843 wrapped function, so if the call fails it remains False. After the call
844 844 completes, wrapper.called is set to True and the output is returned.
845 845
846 846 Testing for truth in wrapper.called allows you to determine if a call to
847 847 func() was attempted and succeeded."""
848 848
849 849 def wrapper(*args,**kw):
850 850 wrapper.called = False
851 851 out = func(*args,**kw)
852 852 wrapper.called = True
853 853 return out
854 854
855 855 wrapper.called = False
856 856 wrapper.__doc__ = func.__doc__
857 857 return wrapper
858 858
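A minimal sketch of flag_calls() in action (the function `compute` is made up for the example):

    def compute(x):
        return 2 * x

    compute = flag_calls(compute)
    print compute.called    # False
    compute(3)
    print compute.called    # True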
859 859 #----------------------------------------------------------------------------
860 860 def dhook_wrap(func,*a,**k):
861 861 """Wrap a function call in a sys.displayhook controller.
862 862
863 863 Returns a wrapper around func which calls func, with all its arguments and
864 864 keywords unmodified, using the default sys.displayhook. Since IPython
865 865 modifies sys.displayhook, it breaks the behavior of certain systems that
866 866 rely on the default behavior, notably doctest.
867 867 """
868 868
869 869 def f(*a,**k):
870 870
871 871 dhook_s = sys.displayhook
872 872 sys.displayhook = sys.__displayhook__
873 873 try:
874 874 out = func(*a,**k)
875 875 finally:
876 876 sys.displayhook = dhook_s
877 877
878 878 return out
879 879
880 880 f.__doc__ = func.__doc__
881 881 return f
882 882
883 883 #----------------------------------------------------------------------------
884 884 def doctest_reload():
885 885 """Properly reload doctest to reuse it interactively.
886 886
887 887 This routine:
888 888
889 889 - reloads doctest
890 890
891 891 - resets its global 'master' attribute to None, so that multiple uses of
892 892 the module interactively don't produce cumulative reports.
893 893
894 894 - Monkeypatches its core test runner method to protect it from IPython's
895 895 modified displayhook. Doctest expects the default displayhook behavior
896 896 deep down, so our modification breaks it completely. For this reason, a
897 897 hard monkeypatch seems like a reasonable solution rather than asking
898 898 users to manually use a different doctest runner when under IPython."""
899 899
900 900 import doctest
901 901 reload(doctest)
902 902 doctest.master=None
903 903
904 904 try:
905 905 doctest.DocTestRunner
906 906 except AttributeError:
907 907 # This is only for python 2.3 compatibility, remove once we move to
908 908 # 2.4 only.
909 909 pass
910 910 else:
911 911 doctest.DocTestRunner.run = dhook_wrap(doctest.DocTestRunner.run)
912 912
913 913 #----------------------------------------------------------------------------
914 914 class HomeDirError(Error):
915 915 pass
916 916
917 917 def get_home_dir():
918 918 """Return the closest possible equivalent to a 'home' directory.
919 919
920 920 We first try $HOME. Absent that, on NT it's $HOMEDRIVE\$HOMEPATH.
921 921
922 922 Currently only Posix and NT are implemented; a HomeDirError exception is
923 923 raised for all other OSes. """
924 924
925 925 isdir = os.path.isdir
926 926 env = os.environ
927 927
928 928 # first, check py2exe distribution root directory for _ipython.
929 929 # This overrides all. Normally does not exist.
930 930
931 931 if '\\library.zip\\' in IPython.__file__.lower():
932 932 root, rest = IPython.__file__.lower().split('library.zip')
933 933 if isdir(root + '_ipython'):
934 934 os.environ["IPYKITROOT"] = root.rstrip('\\')
935 935 return root
936 936
937 937 try:
938 938 homedir = env['HOME']
939 939 if not isdir(homedir):
940 940 # in case a user stuck some string which does NOT resolve to a
941 941 # valid path, it's as good as if we hadn't found it
942 942 raise KeyError
943 943 return homedir
944 944 except KeyError:
945 945 if os.name == 'posix':
946 946 raise HomeDirError,'undefined $HOME, IPython can not proceed.'
947 947 elif os.name == 'nt':
948 948 # For some strange reason, win9x returns 'nt' for os.name.
949 949 try:
950 950 homedir = os.path.join(env['HOMEDRIVE'],env['HOMEPATH'])
951 951 if not isdir(homedir):
952 952 homedir = os.path.join(env['USERPROFILE'])
953 953 if not isdir(homedir):
954 954 raise HomeDirError
955 955 return homedir
956 956 except:
957 957 try:
958 958 # Use the registry to get the 'My Documents' folder.
959 959 import _winreg as wreg
960 960 key = wreg.OpenKey(wreg.HKEY_CURRENT_USER,
961 961 "Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
962 962 homedir = wreg.QueryValueEx(key,'Personal')[0]
963 963 key.Close()
964 964 if not isdir(homedir):
965 965 e = ('Invalid "Personal" folder registry key '
966 966 '(typically "My Documents").\n'
967 967 'Value: %s\n'
968 968 'This is not a valid directory on your system.' %
969 969 homedir)
970 970 raise HomeDirError(e)
971 971 return homedir
972 972 except HomeDirError:
973 973 raise
974 974 except:
975 975 return 'C:\\'
976 976 elif os.name == 'dos':
977 977 # Desperate, may do absurd things in classic MacOS. May work under DOS.
978 978 return 'C:\\'
979 979 else:
980 980 raise HomeDirError,'support for your operating system not implemented.'
981 981
982
983 def get_ipython_dir():
984 """Get the IPython directory for this platform and user.
985
986 This uses the logic in `get_home_dir` to find the home directory
987 987 and then adds either .ipython or _ipython to the end of the path.
988 """
989 if os.name == 'posix':
990 ipdir_def = '.ipython'
991 else:
992 ipdir_def = '_ipython'
993 home_dir = get_home_dir()
994 ipdir = os.path.abspath(os.environ.get('IPYTHONDIR',
995 os.path.join(home_dir,ipdir_def)))
996 return ipdir
997
998 def get_security_dir():
999 """Get the IPython security directory.
1000
1001 This directory is the default location for all security related files,
1002 including SSL/TLS certificates and FURL files.
1003
1004 If the directory does not exist, it is created with 0700 permissions.
1005 If it exists, permissions are set to 0700.
1006 """
1007 security_dir = os.path.join(get_ipython_dir(), 'security')
1008 if not os.path.isdir(security_dir):
1009 os.mkdir(security_dir, 0700)
1010 else:
1011 os.chmod(security_dir, 0700)
1012 return security_dir
1013
982 1014 #****************************************************************************
983 1015 # strings and text
984 1016
985 1017 class LSString(str):
986 1018 """String derivative with a special access attributes.
987 1019
988 1020 These are normal strings, but with the special attributes:
989 1021
990 1022 .l (or .list) : value as list (split on newlines).
991 1023 .n (or .nlstr): original value (the string itself).
992 1024 .s (or .spstr): value as whitespace-separated string.
993 1025 .p (or .paths): list of path objects
994 1026
995 1027 Any values which require transformations are computed only once and
996 1028 cached.
997 1029
998 1030 Such strings are very useful to efficiently interact with the shell, which
999 1031 typically only understands whitespace-separated options for commands."""
1000 1032
1001 1033 def get_list(self):
1002 1034 try:
1003 1035 return self.__list
1004 1036 except AttributeError:
1005 1037 self.__list = self.split('\n')
1006 1038 return self.__list
1007 1039
1008 1040 l = list = property(get_list)
1009 1041
1010 1042 def get_spstr(self):
1011 1043 try:
1012 1044 return self.__spstr
1013 1045 except AttributeError:
1014 1046 self.__spstr = self.replace('\n',' ')
1015 1047 return self.__spstr
1016 1048
1017 1049 s = spstr = property(get_spstr)
1018 1050
1019 1051 def get_nlstr(self):
1020 1052 return self
1021 1053
1022 1054 n = nlstr = property(get_nlstr)
1023 1055
1024 1056 def get_paths(self):
1025 1057 try:
1026 1058 return self.__paths
1027 1059 except AttributeError:
1028 1060 self.__paths = [path(p) for p in self.split('\n') if os.path.exists(p)]
1029 1061 return self.__paths
1030 1062
1031 1063 p = paths = property(get_paths)
1032 1064
1033 1065 def print_lsstring(arg):
1034 1066 """ Prettier (non-repr-like) and more informative printer for LSString """
1035 1067 print "LSString (.p, .n, .l, .s available). Value:"
1036 1068 print arg
1037 1069
1038 1070 print_lsstring = result_display.when_type(LSString)(print_lsstring)
1039 1071
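A small illustration of the LSString attributes (the file names are hypothetical, so .p may well be empty):

    s = LSString('foo.txt\nbar.txt')
    s.n    # 'foo.txt\nbar.txt'  (the string itself)
    s.s    # 'foo.txt bar.txt'   (newlines replaced by spaces)
    s.l    # ['foo.txt', 'bar.txt']
    s.p    # path objects for whichever of the entries actually exist on disk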
1040 1072 #----------------------------------------------------------------------------
1041 1073 class SList(list):
1042 1074 """List derivative with a special access attributes.
1043 1075
1044 1076 These are normal lists, but with the special attributes:
1045 1077
1046 1078 .l (or .list) : value as list (the list itself).
1047 1079 .n (or .nlstr): value as a string, joined on newlines.
1048 1080 .s (or .spstr): value as a string, joined on spaces.
1049 1081 .p (or .paths): list of path objects
1050 1082
1051 1083 Any values which require transformations are computed only once and
1052 1084 cached."""
1053 1085
1054 1086 def get_list(self):
1055 1087 return self
1056 1088
1057 1089 l = list = property(get_list)
1058 1090
1059 1091 def get_spstr(self):
1060 1092 try:
1061 1093 return self.__spstr
1062 1094 except AttributeError:
1063 1095 self.__spstr = ' '.join(self)
1064 1096 return self.__spstr
1065 1097
1066 1098 s = spstr = property(get_spstr)
1067 1099
1068 1100 def get_nlstr(self):
1069 1101 try:
1070 1102 return self.__nlstr
1071 1103 except AttributeError:
1072 1104 self.__nlstr = '\n'.join(self)
1073 1105 return self.__nlstr
1074 1106
1075 1107 n = nlstr = property(get_nlstr)
1076 1108
1077 1109 def get_paths(self):
1078 1110 try:
1079 1111 return self.__paths
1080 1112 except AttributeError:
1081 1113 self.__paths = [path(p) for p in self if os.path.exists(p)]
1082 1114 return self.__paths
1083 1115
1084 1116 p = paths = property(get_paths)
1085 1117
1086 1118 def grep(self, pattern, prune = False, field = None):
1087 1119 """ Return all strings matching 'pattern' (a regex or callable)
1088 1120
1089 1121 This is case-insensitive. If prune is true, return all items
1090 1122 NOT matching the pattern.
1091 1123
1092 1124 If field is specified, the match must occur in the specified
1093 1125 whitespace-separated field.
1094 1126
1095 1127 Examples::
1096 1128
1097 1129 a.grep( lambda x: x.startswith('C') )
1098 1130 a.grep('Cha.*log', prune=1)
1099 1131 a.grep('chm', field=-1)
1100 1132 """
1101 1133
1102 1134 def match_target(s):
1103 1135 if field is None:
1104 1136 return s
1105 1137 parts = s.split()
1106 1138 try:
1107 1139 tgt = parts[field]
1108 1140 return tgt
1109 1141 except IndexError:
1110 1142 return ""
1111 1143
1112 1144 if isinstance(pattern, basestring):
1113 1145 pred = lambda x : re.search(pattern, x, re.IGNORECASE)
1114 1146 else:
1115 1147 pred = pattern
1116 1148 if not prune:
1117 1149 return SList([el for el in self if pred(match_target(el))])
1118 1150 else:
1119 1151 return SList([el for el in self if not pred(match_target(el))])
1120 1152 def fields(self, *fields):
1121 1153 """ Collect whitespace-separated fields from string list
1122 1154
1123 1155 Allows quick awk-like usage of string lists.
1124 1156
1125 1157 Example data (in var a, created by 'a = !ls -l')::
1126 1158 -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
1127 1159 drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
1128 1160
1129 1161 a.fields(0) is ['-rwxrwxrwx', 'drwxrwxrwx+']
1130 1162 a.fields(1,0) is ['1 -rwxrwxrwx', '6 drwxrwxrwx+']
1131 1163 (note the joining by space).
1132 1164 a.fields(-1) is ['ChangeLog', 'IPython']
1133 1165
1134 1166 IndexErrors are ignored.
1135 1167
1136 1168 Without args, fields() just split()'s the strings.
1137 1169 """
1138 1170 if len(fields) == 0:
1139 1171 return [el.split() for el in self]
1140 1172
1141 1173 res = SList()
1142 1174 for el in [f.split() for f in self]:
1143 1175 lineparts = []
1144 1176
1145 1177 for fd in fields:
1146 1178 try:
1147 1179 lineparts.append(el[fd])
1148 1180 except IndexError:
1149 1181 pass
1150 1182 if lineparts:
1151 1183 res.append(" ".join(lineparts))
1152 1184
1153 1185 return res
1154 1186 def sort(self,field= None, nums = False):
1155 1187 """ sort by specified fields (see fields())
1156 1188
1157 1189 Example::
1158 1190 a.sort(1, nums = True)
1159 1191
1160 1192 Sorts a by second field, in numerical order (so that 21 > 3)
1161 1193
1162 1194 """
1163 1195
1164 1196 #decorate, sort, undecorate
1165 1197 if field is not None:
1166 1198 dsu = [[SList([line]).fields(field), line] for line in self]
1167 1199 else:
1168 1200 dsu = [[line, line] for line in self]
1169 1201 if nums:
1170 1202 for i in range(len(dsu)):
1171 1203 numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
1172 1204 try:
1173 1205 n = int(numstr)
1174 1206 except ValueError:
1175 1207 n = 0
1176 1208 dsu[i][0] = n
1177 1209
1178 1210
1179 1211 dsu.sort()
1180 1212 return SList([t[1] for t in dsu])
1181 1213
1182 1214 def print_slist(arg):
1183 1215 """ Prettier (non-repr-like) and more informative printer for SList """
1184 1216 print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
1185 1217 if hasattr(arg, 'hideonce') and arg.hideonce:
1186 1218 arg.hideonce = False
1187 1219 return
1188 1220
1189 1221 nlprint(arg)
1190 1222
1191 1223 print_slist = result_display.when_type(SList)(print_slist)
1192 1224
1193 1225
1194 1226
1195 1227 #----------------------------------------------------------------------------
1196 1228 def esc_quotes(strng):
1197 1229 """Return the input string with single and double quotes escaped out"""
1198 1230
1199 1231 return strng.replace('"','\\"').replace("'","\\'")
1200 1232
1201 1233 #----------------------------------------------------------------------------
1202 1234 def make_quoted_expr(s):
1203 1235 """Return string s in appropriate quotes, using raw string if possible.
1204 1236
1205 1237 Effectively this turns string: cd \ao\ao\
1206 1238 to: r"cd \ao\ao\_"[:-1]
1207 1239
1208 1240 Note the use of a raw string and padding at the end to allow a trailing backslash.
1209 1241
1210 1242 """
1211 1243
1212 1244 tail = ''
1213 1245 tailpadding = ''
1214 1246 raw = ''
1215 1247 if "\\" in s:
1216 1248 raw = 'r'
1217 1249 if s.endswith('\\'):
1218 1250 tail = '[:-1]'
1219 1251 tailpadding = '_'
1220 1252 if '"' not in s:
1221 1253 quote = '"'
1222 1254 elif "'" not in s:
1223 1255 quote = "'"
1224 1256 elif '"""' not in s and not s.endswith('"'):
1225 1257 quote = '"""'
1226 1258 elif "'''" not in s and not s.endswith("'"):
1227 1259 quote = "'''"
1228 1260 else:
1229 1261 # give up, backslash-escaped string will do
1230 1262 return '"%s"' % esc_quotes(s)
1231 1263 res = raw + quote + s + tailpadding + quote + tail
1232 1264 return res
1233 1265
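A couple of illustrative cases (the second mirrors the trailing-backslash example from the docstring):

    make_quoted_expr('hello')        # -> '"hello"'
    make_quoted_expr('cd \\tmp\\')   # -> the text  r"cd \tmp\_"[:-1]  (raw string, padded tail)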
1234 1266
1235 1267 #----------------------------------------------------------------------------
1236 1268 def raw_input_multi(header='', ps1='==> ', ps2='..> ',terminate_str = '.'):
1237 1269 """Take multiple lines of input.
1238 1270
1239 1271 A list with each line of input as a separate element is returned when a
1240 1272 termination string is entered (defaults to a single '.'). Input can also
1241 1273 terminate via EOF (^D in Unix, ^Z-RET in Windows).
1242 1274
1243 1275 Lines of input which end in \\ are joined into single entries (and a
1244 1276 secondary continuation prompt is issued as long as the user terminates
1245 1277 lines with \\). This allows entering very long strings which are still
1246 1278 meant to be treated as single entities.
1247 1279 """
1248 1280
1249 1281 try:
1250 1282 if header:
1251 1283 header += '\n'
1252 1284 lines = [raw_input(header + ps1)]
1253 1285 except EOFError:
1254 1286 return []
1255 1287 terminate = [terminate_str]
1256 1288 try:
1257 1289 while lines[-1:] != terminate:
1258 1290 new_line = raw_input(ps1)
1259 1291 while new_line.endswith('\\'):
1260 1292 new_line = new_line[:-1] + raw_input(ps2)
1261 1293 lines.append(new_line)
1262 1294
1263 1295 return lines[:-1] # don't return the termination command
1264 1296 except EOFError:
1265 1297 print
1266 1298 return lines
1267 1299
1268 1300 #----------------------------------------------------------------------------
1269 1301 def raw_input_ext(prompt='', ps2='... '):
1270 1302 """Similar to raw_input(), but accepts extended lines if input ends with \\."""
1271 1303
1272 1304 line = raw_input(prompt)
1273 1305 while line.endswith('\\'):
1274 1306 line = line[:-1] + raw_input(ps2)
1275 1307 return line
1276 1308
1277 1309 #----------------------------------------------------------------------------
1278 1310 def ask_yes_no(prompt,default=None):
1279 1311 """Asks a question and returns a boolean (y/n) answer.
1280 1312
1281 1313 If default is given (one of 'y','n'), it is used if the user input is
1282 1314 empty. Otherwise the question is repeated until an answer is given.
1283 1315
1284 1316 An EOF is treated as the default answer. If there is no default, an
1285 1317 exception is raised to prevent infinite loops.
1286 1318
1287 1319 Valid answers are: y/yes/n/no (match is not case sensitive)."""
1288 1320
1289 1321 answers = {'y':True,'n':False,'yes':True,'no':False}
1290 1322 ans = None
1291 1323 while ans not in answers.keys():
1292 1324 try:
1293 1325 ans = raw_input(prompt+' ').lower()
1294 1326 if not ans: # response was an empty string
1295 1327 ans = default
1296 1328 except KeyboardInterrupt:
1297 1329 pass
1298 1330 except EOFError:
1299 1331 if default in answers.keys():
1300 1332 ans = default
1301 1333 print
1302 1334 else:
1303 1335 raise
1304 1336
1305 1337 return answers[ans]
1306 1338
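Typical interactive use, sketched with a hypothetical action:

    if ask_yes_no('Overwrite existing file?', default='n'):
        do_overwrite()   # hypothetical helper, shown only for illustration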
1307 1339 #----------------------------------------------------------------------------
1308 1340 def marquee(txt='',width=78,mark='*'):
1309 1341 """Return the input string centered in a 'marquee'."""
1310 1342 if not txt:
1311 1343 return (mark*width)[:width]
1312 1344 nmark = (width-len(txt)-2)/len(mark)/2
1313 1345 if nmark < 0: nmark =0
1314 1346 marks = mark*nmark
1315 1347 return '%s %s %s' % (marks,txt,marks)
1316 1348
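For instance:

    >>> marquee('Title', width=19)
    '****** Title ******'
    >>> marquee(width=8)
    '********'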
1317 1349 #----------------------------------------------------------------------------
1318 1350 class EvalDict:
1319 1351 """
1320 1352 Emulate a dict which evaluates its contents in the caller's frame.
1321 1353
1322 1354 Usage:
1323 1355 >>> number = 19
1324 1356
1325 1357 >>> text = "python"
1326 1358
1327 1359 >>> print "%(text.capitalize())s %(number/9.0).1f rules!" % EvalDict()
1328 1360 Python 2.1 rules!
1329 1361 """
1330 1362
1331 1363 # This version is due to sismex01@hebmex.com on c.l.py, and is basically a
1332 1364 # modified (shorter) version of:
1333 1365 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66018 by
1334 1366 # Skip Montanaro (skip@pobox.com).
1335 1367
1336 1368 def __getitem__(self, name):
1337 1369 frame = sys._getframe(1)
1338 1370 return eval(name, frame.f_globals, frame.f_locals)
1339 1371
1340 1372 EvalString = EvalDict # for backwards compatibility
1341 1373 #----------------------------------------------------------------------------
1342 1374 def qw(words,flat=0,sep=None,maxsplit=-1):
1343 1375 """Similar to Perl's qw() operator, but with some more options.
1344 1376
1345 1377 qw(words,flat=0,sep=None,maxsplit=-1) -> words.split(sep,maxsplit)
1346 1378
1347 1379 words can also be a list itself, and with flat=1, the output will be
1348 1380 recursively flattened.
1349 1381
1350 1382 Examples:
1351 1383
1352 1384 >>> qw('1 2')
1353 1385 ['1', '2']
1354 1386
1355 1387 >>> qw(['a b','1 2',['m n','p q']])
1356 1388 [['a', 'b'], ['1', '2'], [['m', 'n'], ['p', 'q']]]
1357 1389
1358 1390 >>> qw(['a b','1 2',['m n','p q']],flat=1)
1359 1391 ['a', 'b', '1', '2', 'm', 'n', 'p', 'q']
1360 1392 """
1361 1393
1362 1394 if type(words) in StringTypes:
1363 1395 return [word.strip() for word in words.split(sep,maxsplit)
1364 1396 if word and not word.isspace() ]
1365 1397 if flat:
1366 1398 return flatten(map(qw,words,[1]*len(words)))
1367 1399 return map(qw,words)
1368 1400
1369 1401 #----------------------------------------------------------------------------
1370 1402 def qwflat(words,sep=None,maxsplit=-1):
1371 1403 """Calls qw(words) in flat mode. It's just a convenient shorthand."""
1372 1404 return qw(words,1,sep,maxsplit)
1373 1405
1374 1406 #----------------------------------------------------------------------------
1375 1407 def qw_lol(indata):
1376 1408 """qw_lol('a b') -> [['a','b']],
1377 1409 otherwise it's just a call to qw().
1378 1410
1379 1411 We need this to make sure the modules_some keys *always* end up as a
1380 1412 list of lists."""
1381 1413
1382 1414 if type(indata) in StringTypes:
1383 1415 return [qw(indata)]
1384 1416 else:
1385 1417 return qw(indata)
1386 1418
1387 1419 #-----------------------------------------------------------------------------
1388 1420 def list_strings(arg):
1389 1421 """Always return a list of strings, given a string or list of strings
1390 1422 as input."""
1391 1423
1392 1424 if type(arg) in StringTypes: return [arg]
1393 1425 else: return arg
1394 1426
1395 1427 #----------------------------------------------------------------------------
1396 1428 def grep(pat,list,case=1):
1397 1429 """Simple minded grep-like function.
1398 1430 grep(pat,list) returns occurrences of pat in list, None on failure.
1399 1431
1400 1432 It only does simple string matching, with no support for regexps. Use the
1401 1433 option case=0 for case-insensitive matching."""
1402 1434
1403 1435 # This is pretty crude. It should at least copy only references to the
1404 1436 # original data in case it's big; for now it copies the data for the output.
1405 1437 out=[]
1406 1438 if case:
1407 1439 for term in list:
1408 1440 if term.find(pat)>-1: out.append(term)
1409 1441 else:
1410 1442 lpat=pat.lower()
1411 1443 for term in list:
1412 1444 if term.lower().find(lpat)>-1: out.append(term)
1413 1445
1414 1446 if len(out): return out
1415 1447 else: return None
1416 1448
1417 1449 #----------------------------------------------------------------------------
1418 1450 def dgrep(pat,*opts):
1419 1451 """Return grep() on dir()+dir(__builtins__).
1420 1452
1421 1453 A very common use of grep() when working interactively."""
1422 1454
1423 1455 return grep(pat,dir(__main__)+dir(__main__.__builtins__),*opts)
1424 1456
1425 1457 #----------------------------------------------------------------------------
1426 1458 def idgrep(pat):
1427 1459 """Case-insensitive dgrep()"""
1428 1460
1429 1461 return dgrep(pat,0)
1430 1462
1431 1463 #----------------------------------------------------------------------------
1432 1464 def igrep(pat,list):
1433 1465 """Synonym for case-insensitive grep."""
1434 1466
1435 1467 return grep(pat,list,case=0)
1436 1468
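A quick illustration of the plain and case-insensitive variants (note that grep() returns None, not an empty list, when nothing matches):

    >>> grep('err', ['error.log', 'access.log', 'stderr.txt'])
    ['error.log', 'stderr.txt']
    >>> igrep('ERR', ['error.log', 'access.log'])
    ['error.log']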
1437 1469 #----------------------------------------------------------------------------
1438 1470 def indent(str,nspaces=4,ntabs=0):
1439 1471 """Indent a string a given number of spaces or tabstops.
1440 1472
1441 1473 indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
1442 1474 """
1443 1475 if str is None:
1444 1476 return
1445 1477 ind = '\t'*ntabs+' '*nspaces
1446 1478 outstr = '%s%s' % (ind,str.replace(os.linesep,os.linesep+ind))
1447 1479 if outstr.endswith(os.linesep+ind):
1448 1480 return outstr[:-len(ind)]
1449 1481 else:
1450 1482 return outstr
1451 1483
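For example, on a platform where os.linesep is '\n' (the function operates on os.linesep rather than on '\n' literally):

    indent('line1\nline2', nspaces=2)   # -> '  line1\n  line2'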
1452 1484 #-----------------------------------------------------------------------------
1453 1485 def native_line_ends(filename,backup=1):
1454 1486 """Convert (in-place) a file to line-ends native to the current OS.
1455 1487
1456 1488 If the optional backup argument is given as false, no backup of the
1457 1489 original file is left. """
1458 1490
1459 1491 backup_suffixes = {'posix':'~','dos':'.bak','nt':'.bak','mac':'.bak'}
1460 1492
1461 1493 bak_filename = filename + backup_suffixes[os.name]
1462 1494
1463 1495 original = open(filename).read()
1464 1496 shutil.copy2(filename,bak_filename)
1465 1497 try:
1466 1498 new = open(filename,'wb')
1467 1499 new.write(os.linesep.join(original.splitlines()))
1468 1500 new.write(os.linesep) # ALWAYS put an eol at the end of the file
1469 1501 new.close()
1470 1502 except:
1471 1503 os.rename(bak_filename,filename)
1472 1504 if not backup:
1473 1505 try:
1474 1506 os.remove(bak_filename)
1475 1507 except:
1476 1508 pass
1477 1509
1478 1510 #----------------------------------------------------------------------------
1479 1511 def get_pager_cmd(pager_cmd = None):
1480 1512 """Return a pager command.
1481 1513
1482 1514 Makes some attempts at finding an OS-correct one."""
1483 1515
1484 1516 if os.name == 'posix':
1485 1517 default_pager_cmd = 'less -r' # -r for color control sequences
1486 1518 elif os.name in ['nt','dos']:
1487 1519 default_pager_cmd = 'type'
1488 1520
1489 1521 if pager_cmd is None:
1490 1522 try:
1491 1523 pager_cmd = os.environ['PAGER']
1492 1524 except:
1493 1525 pager_cmd = default_pager_cmd
1494 1526 return pager_cmd
1495 1527
1496 1528 #-----------------------------------------------------------------------------
1497 1529 def get_pager_start(pager,start):
1498 1530 """Return the string for paging files with an offset.
1499 1531
1500 1532 This is the '+N' argument which less and more (under Unix) accept.
1501 1533 """
1502 1534
1503 1535 if pager in ['less','more']:
1504 1536 if start:
1505 1537 start_string = '+' + str(start)
1506 1538 else:
1507 1539 start_string = ''
1508 1540 else:
1509 1541 start_string = ''
1510 1542 return start_string
1511 1543
1512 1544 #----------------------------------------------------------------------------
1513 1545 # (X)emacs on W32 doesn't like to be bypassed with msvcrt.getch()
1514 1546 if os.name == 'nt' and os.environ.get('TERM','dumb') != 'emacs':
1515 1547 import msvcrt
1516 1548 def page_more():
1517 1549 """ Smart pausing between pages
1518 1550
1519 1551 @return: True if more lines should be printed, False to quit
1520 1552 """
1521 1553 Term.cout.write('---Return to continue, q to quit--- ')
1522 1554 ans = msvcrt.getch()
1523 1555 if ans in ("q", "Q"):
1524 1556 result = False
1525 1557 else:
1526 1558 result = True
1527 1559 Term.cout.write("\b"*37 + " "*37 + "\b"*37)
1528 1560 return result
1529 1561 else:
1530 1562 def page_more():
1531 1563 ans = raw_input('---Return to continue, q to quit--- ')
1532 1564 if ans.lower().startswith('q'):
1533 1565 return False
1534 1566 else:
1535 1567 return True
1536 1568
1537 1569 esc_re = re.compile(r"(\x1b[^m]+m)")
1538 1570
1539 1571 def page_dumb(strng,start=0,screen_lines=25):
1540 1572 """Very dumb 'pager' in Python, for when nothing else works.
1541 1573
1542 1574 Only moves forward, same interface as page(), except for pager_cmd and
1543 1575 mode."""
1544 1576
1545 1577 out_ln = strng.splitlines()[start:]
1546 1578 screens = chop(out_ln,screen_lines-1)
1547 1579 if len(screens) == 1:
1548 1580 print >>Term.cout, os.linesep.join(screens[0])
1549 1581 else:
1550 1582 last_escape = ""
1551 1583 for scr in screens[0:-1]:
1552 1584 hunk = os.linesep.join(scr)
1553 1585 print >>Term.cout, last_escape + hunk
1554 1586 if not page_more():
1555 1587 return
1556 1588 esc_list = esc_re.findall(hunk)
1557 1589 if len(esc_list) > 0:
1558 1590 last_escape = esc_list[-1]
1559 1591 print >>Term.cout, last_escape + os.linesep.join(screens[-1])
1560 1592
1561 1593 #----------------------------------------------------------------------------
1562 1594 def page(strng,start=0,screen_lines=0,pager_cmd = None):
1563 1595 """Print a string, piping through a pager after a certain length.
1564 1596
1565 1597 The screen_lines parameter specifies the number of *usable* lines of your
1566 1598 terminal screen (total lines minus lines you need to reserve to show other
1567 1599 information).
1568 1600
1569 1601 If you set screen_lines to a number <=0, page() will try to auto-determine
1570 1602 your screen size and will only use up to (screen_size+screen_lines) for
1571 1603 printing, paging after that. That is, if you want auto-detection but need
1572 1604 to reserve the bottom 3 lines of the screen, use screen_lines = -3, and for
1573 1605 auto-detection without any lines reserved simply use screen_lines = 0.
1574 1606
1575 1607 If a string won't fit in the allowed lines, it is sent through the
1576 1608 specified pager command. If none given, look for PAGER in the environment,
1577 1609 and ultimately default to less.
1578 1610
1579 1611 If no system pager works, the string is sent through a 'dumb pager'
1580 1612 written in python, very simplistic.
1581 1613 """
1582 1614
1583 1615 # Some routines may auto-compute start offsets incorrectly and pass a
1584 1616 # negative value. Offset to 0 for robustness.
1585 1617 start = max(0,start)
1586 1618
1587 1619 # first, try the hook
1588 1620 ip = IPython.ipapi.get()
1589 1621 if ip:
1590 1622 try:
1591 1623 ip.IP.hooks.show_in_pager(strng)
1592 1624 return
1593 1625 except IPython.ipapi.TryNext:
1594 1626 pass
1595 1627
1596 1628 # Ugly kludge, but calling curses.initscr() flat out crashes in emacs
1597 1629 TERM = os.environ.get('TERM','dumb')
1598 1630 if TERM in ['dumb','emacs'] and os.name != 'nt':
1599 1631 print strng
1600 1632 return
1601 1633 # chop off the topmost part of the string we don't want to see
1602 1634 str_lines = strng.split(os.linesep)[start:]
1603 1635 str_toprint = os.linesep.join(str_lines)
1604 1636 num_newlines = len(str_lines)
1605 1637 len_str = len(str_toprint)
1606 1638
1607 1639 # Dumb heuristics to guesstimate number of on-screen lines the string
1608 1640 # takes. Very basic, but good enough for docstrings in reasonable
1609 1641 # terminals. If someone later feels like refining it, it's not hard.
1610 1642 numlines = max(num_newlines,int(len_str/80)+1)
1611 1643
1612 1644 if os.name == "nt":
1613 1645 screen_lines_def = get_console_size(defaulty=25)[1]
1614 1646 else:
1615 1647 screen_lines_def = 25 # default value if we can't auto-determine
1616 1648
1617 1649 # auto-determine screen size
1618 1650 if screen_lines <= 0:
1619 1651 if TERM=='xterm':
1620 1652 use_curses = USE_CURSES
1621 1653 else:
1622 1654 # curses causes problems on many terminals other than xterm.
1623 1655 use_curses = False
1624 1656 if use_curses:
1625 1657 # There is a bug in curses, where *sometimes* it fails to properly
1626 1658 # initialize, and then after the endwin() call is made, the
1627 1659 # terminal is left in an unusable state. Rather than trying to
1628 1660 # check every time for this (by requesting and comparing termios
1629 1661 # flags each time), we just save the initial terminal state and
1630 1662 # unconditionally reset it every time. It's cheaper than making
1631 1663 # the checks.
1632 1664 term_flags = termios.tcgetattr(sys.stdout)
1633 1665 scr = curses.initscr()
1634 1666 screen_lines_real,screen_cols = scr.getmaxyx()
1635 1667 curses.endwin()
1636 1668 # Restore terminal state in case endwin() didn't.
1637 1669 termios.tcsetattr(sys.stdout,termios.TCSANOW,term_flags)
1638 1670 # Now we have what we needed: the screen size in rows/columns
1639 1671 screen_lines += screen_lines_real
1640 1672 #print '***Screen size:',screen_lines_real,'lines x',\
1641 1673 #screen_cols,'columns.' # dbg
1642 1674 else:
1643 1675 screen_lines += screen_lines_def
1644 1676
1645 1677 #print 'numlines',numlines,'screenlines',screen_lines # dbg
1646 1678 if numlines <= screen_lines :
1647 1679 #print '*** normal print' # dbg
1648 1680 print >>Term.cout, str_toprint
1649 1681 else:
1650 1682 # Try to open pager and default to internal one if that fails.
1651 1683 # All failure modes are tagged as 'retval=1', to match the return
1652 1684 # value of a failed system command. If any intermediate attempt
1653 1685 # sets retval to 1, at the end we resort to our own page_dumb() pager.
1654 1686 pager_cmd = get_pager_cmd(pager_cmd)
1655 1687 pager_cmd += ' ' + get_pager_start(pager_cmd,start)
1656 1688 if os.name == 'nt':
1657 1689 if pager_cmd.startswith('type'):
1658 1690 # The default WinXP 'type' command fails on complex strings.
1659 1691 retval = 1
1660 1692 else:
1661 1693 tmpname = tempfile.mktemp('.txt')
1662 1694 tmpfile = file(tmpname,'wt')
1663 1695 tmpfile.write(strng)
1664 1696 tmpfile.close()
1665 1697 cmd = "%s < %s" % (pager_cmd,tmpname)
1666 1698 if os.system(cmd):
1667 1699 retval = 1
1668 1700 else:
1669 1701 retval = None
1670 1702 os.remove(tmpname)
1671 1703 else:
1672 1704 try:
1673 1705 retval = None
1674 1706 # if I use popen4, things hang. No idea why.
1675 1707 #pager,shell_out = os.popen4(pager_cmd)
1676 1708 pager = os.popen(pager_cmd,'w')
1677 1709 pager.write(strng)
1678 1710 pager.close()
1679 1711 retval = pager.close() # success returns None
1680 1712 except IOError,msg: # broken pipe when user quits
1681 1713 if msg.args == (32,'Broken pipe'):
1682 1714 retval = None
1683 1715 else:
1684 1716 retval = 1
1685 1717 except OSError:
1686 1718 # Other strange problems, sometimes seen in Win2k/cygwin
1687 1719 retval = 1
1688 1720 if retval is not None:
1689 1721 page_dumb(strng,screen_lines=screen_lines)
1690 1722
1691 1723 #----------------------------------------------------------------------------
1692 1724 def page_file(fname,start = 0, pager_cmd = None):
1693 1725 """Page a file, using an optional pager command and starting line.
1694 1726 """
1695 1727
1696 1728 pager_cmd = get_pager_cmd(pager_cmd)
1697 1729 pager_cmd += ' ' + get_pager_start(pager_cmd,start)
1698 1730
1699 1731 try:
1700 1732 if os.environ['TERM'] in ['emacs','dumb']:
1701 1733 raise EnvironmentError
1702 1734 xsys(pager_cmd + ' ' + fname)
1703 1735 except:
1704 1736 try:
1705 1737 if start > 0:
1706 1738 start -= 1
1707 1739 page(open(fname).read(),start)
1708 1740 except:
1709 1741 print 'Unable to show file',`fname`
1710 1742
1711 1743
1712 1744 #----------------------------------------------------------------------------
1713 1745 def snip_print(str,width = 75,print_full = 0,header = ''):
1714 1746 """Print a string snipping the midsection to fit in width.
1715 1747
1716 1748 print_full: mode control:
1717 1749 - 0: only snip long strings
1718 1750 - 1: send to page() directly.
1719 1751 - 2: snip long strings and ask for full length viewing with page()
1720 1752 Return 1 if snipping was necessary, 0 otherwise."""
1721 1753
1722 1754 if print_full == 1:
1723 1755 page(header+str)
1724 1756 return 0
1725 1757
1726 1758 print header,
1727 1759 if len(str) < width:
1728 1760 print str
1729 1761 snip = 0
1730 1762 else:
1731 1763 whalf = int((width -5)/2)
1732 1764 print str[:whalf] + ' <...> ' + str[-whalf:]
1733 1765 snip = 1
1734 1766 if snip and print_full == 2:
1735 1767 if raw_input(header+' Snipped. View (y/n)? [N]').lower() == 'y':
1736 1768 page(str)
1737 1769 return snip
1738 1770
1739 1771 #****************************************************************************
1740 1772 # lists, dicts and structures
1741 1773
1742 1774 def belong(candidates,checklist):
1743 1775 """Check whether a list of items appear in a given list of options.
1744 1776
1745 1777 Returns a list of booleans, one for each candidate given."""
1746 1778
1747 1779 return [x in checklist for x in candidates]
1748 1780
1749 1781 #----------------------------------------------------------------------------
1750 1782 def uniq_stable(elems):
1751 1783 """uniq_stable(elems) -> list
1752 1784
1753 1785 Return from an iterable, a list of all the unique elements in the input,
1754 1786 but maintaining the order in which they first appear.
1755 1787
1756 1788 A naive solution to this problem which just makes a dictionary with the
1757 1789 elements as keys fails to respect the stability condition, since
1758 1790 dictionaries are unsorted by nature.
1759 1791
1760 1792 Note: All elements in the input must be valid dictionary keys for this
1761 1793 routine to work, as it internally uses a dictionary for efficiency
1762 1794 reasons."""
1763 1795
1764 1796 unique = []
1765 1797 unique_dict = {}
1766 1798 for nn in elems:
1767 1799 if nn not in unique_dict:
1768 1800 unique.append(nn)
1769 1801 unique_dict[nn] = None
1770 1802 return unique
1771 1803
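Two small examples of the order-preserving behaviour:

    >>> uniq_stable([1, 2, 1, 3, 2, 1])
    [1, 2, 3]
    >>> uniq_stable('abracadabra')
    ['a', 'b', 'r', 'c', 'd']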
1772 1804 #----------------------------------------------------------------------------
1773 1805 class NLprinter:
1774 1806 """Print an arbitrarily nested list, indicating index numbers.
1775 1807
1776 1808 An instance of this class called nlprint is available and callable as a
1777 1809 function.
1778 1810
1779 1811 nlprint(list,indent=' ',sep=': ') -> prints indenting each level by 'indent'
1780 1812 and using 'sep' to separate the index from the value. """
1781 1813
1782 1814 def __init__(self):
1783 1815 self.depth = 0
1784 1816
1785 1817 def __call__(self,lst,pos='',**kw):
1786 1818 """Prints the nested list numbering levels."""
1787 1819 kw.setdefault('indent',' ')
1788 1820 kw.setdefault('sep',': ')
1789 1821 kw.setdefault('start',0)
1790 1822 kw.setdefault('stop',len(lst))
1791 1823 # we need to remove start and stop from kw so they don't propagate
1792 1824 # into a recursive call for a nested list.
1793 1825 start = kw['start']; del kw['start']
1794 1826 stop = kw['stop']; del kw['stop']
1795 1827 if self.depth == 0 and 'header' in kw.keys():
1796 1828 print kw['header']
1797 1829
1798 1830 for idx in range(start,stop):
1799 1831 elem = lst[idx]
1800 1832 if type(elem)==type([]):
1801 1833 self.depth += 1
1802 1834 self.__call__(elem,itpl('$pos$idx,'),**kw)
1803 1835 self.depth -= 1
1804 1836 else:
1805 1837 printpl(kw['indent']*self.depth+'$pos$idx$kw["sep"]$elem')
1806 1838
1807 1839 nlprint = NLprinter()
1808 1840 #----------------------------------------------------------------------------
1809 1841 def all_belong(candidates,checklist):
1810 1842 """Check whether a list of items ALL appear in a given list of options.
1811 1843
1812 1844 Returns a single 1 or 0 value."""
1813 1845
1814 1846 return 1-(0 in [x in checklist for x in candidates])
1815 1847
1816 1848 #----------------------------------------------------------------------------
1817 1849 def sort_compare(lst1,lst2,inplace = 1):
1818 1850 """Sort and compare two lists.
1819 1851
1820 1852 By default it does it in place, thus modifying the lists. Use inplace = 0
1821 1853 to avoid that (at the cost of temporary copy creation)."""
1822 1854 if not inplace:
1823 1855 lst1 = lst1[:]
1824 1856 lst2 = lst2[:]
1825 1857 lst1.sort(); lst2.sort()
1826 1858 return lst1 == lst2
1827 1859
1828 1860 #----------------------------------------------------------------------------
1829 1861 def list2dict(lst):
1830 1862 """Takes a list of (key,value) pairs and turns it into a dict."""
1831 1863
1832 1864 dic = {}
1833 1865 for k,v in lst: dic[k] = v
1834 1866 return dic
1835 1867
1836 1868 #----------------------------------------------------------------------------
1837 1869 def list2dict2(lst,default=''):
1838 1870 """Takes a list and turns it into a dict.
1839 1871 Much slower than list2dict, but more versatile. This version can take
1840 1872 lists with sublists of arbitrary length (including scalars)."""
1841 1873
1842 1874 dic = {}
1843 1875 for elem in lst:
1844 1876 if type(elem) in (types.ListType,types.TupleType):
1845 1877 size = len(elem)
1846 1878 if size == 0:
1847 1879 pass
1848 1880 elif size == 1:
1849 1881 dic[elem[0]] = default
1850 1882 else:
1851 1883 k,v = elem[0], elem[1:]
1852 1884 if len(v) == 1: v = v[0]
1853 1885 dic[k] = v
1854 1886 else:
1855 1887 dic[elem] = default
1856 1888 return dic
1857 1889
1858 1890 #----------------------------------------------------------------------------
1859 1891 def flatten(seq):
1860 1892 """Flatten a list of lists (NOT recursive, only works for 2d lists)."""
1861 1893
1862 1894 return [x for subseq in seq for x in subseq]
1863 1895
1864 1896 #----------------------------------------------------------------------------
1865 1897 def get_slice(seq,start=0,stop=None,step=1):
1866 1898 """Get a slice of a sequence with variable step. Specify start,stop,step."""
1867 1899 if stop is None:
1868 1900 stop = len(seq)
1869 1901 item = lambda i: seq[i]
1870 1902 return map(item,xrange(start,stop,step))
1871 1903
1872 1904 #----------------------------------------------------------------------------
1873 1905 def chop(seq,size):
1874 1906 """Chop a sequence into chunks of the given size."""
1875 1907 chunk = lambda i: seq[i:i+size]
1876 1908 return map(chunk,xrange(0,len(seq),size))
1877 1909
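A few illustrative calls for the three small sequence helpers above (Python 2 semantics, where range() and map() return lists):

    >>> flatten([[1, 2], [3], [4, 5]])
    [1, 2, 3, 4, 5]
    >>> get_slice(range(10), start=1, stop=8, step=3)
    [1, 4, 7]
    >>> chop(range(7), 3)
    [[0, 1, 2], [3, 4, 5], [6]]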
1878 1910 #----------------------------------------------------------------------------
1879 1911 # with is a keyword as of python 2.5, so this function is renamed to withobj
1880 1912 # from its old 'with' name.
1881 1913 def with_obj(object, **args):
1882 1914 """Set multiple attributes for an object, similar to Pascal's with.
1883 1915
1884 1916 Example:
1885 1917 with_obj(jim,
1886 1918 born = 1960,
1887 1919 haircolour = 'Brown',
1888 1920 eyecolour = 'Green')
1889 1921
1890 1922 Credit: Greg Ewing, in
1891 1923 http://mail.python.org/pipermail/python-list/2001-May/040703.html.
1892 1924
1893 1925 NOTE: up until IPython 0.7.2, this was called simply 'with', but 'with'
1894 1926 has become a keyword for Python 2.5, so we had to rename it."""
1895 1927
1896 1928 object.__dict__.update(args)
1897 1929
1898 1930 #----------------------------------------------------------------------------
1899 1931 def setattr_list(obj,alist,nspace = None):
1900 1932 """Set a list of attributes for an object taken from a namespace.
1901 1933
1902 1934 setattr_list(obj,alist,nspace) -> sets in obj all the attributes listed in
1903 1935 alist with their values taken from nspace, which must be a dict (something
1904 1936 like locals() will often do) If nspace isn't given, locals() of the
1905 1937 *caller* is used, so in most cases you can omit it.
1906 1938
1907 1939 Note that alist can be given as a string, which will be automatically
1908 1940 split into a list on whitespace. If given as a list, it must be a list of
1909 1941 *strings* (the variable names themselves), not of variables."""
1910 1942
1911 1943 # this grabs the local variables from the *previous* call frame -- that is
1912 1944 # the locals from the function that called setattr_list().
1913 1945 # - snipped from weave.inline()
1914 1946 if nspace is None:
1915 1947 call_frame = sys._getframe().f_back
1916 1948 nspace = call_frame.f_locals
1917 1949
1918 1950 if type(alist) in StringTypes:
1919 1951 alist = alist.split()
1920 1952 for attr in alist:
1921 1953 val = eval(attr,nspace)
1922 1954 setattr(obj,attr,val)
1923 1955
1924 1956 #----------------------------------------------------------------------------
1925 1957 def getattr_list(obj,alist,*args):
1926 1958 """getattr_list(obj,alist[, default]) -> attribute list.
1927 1959
1928 1960 Get a list of named attributes for an object. When a default argument is
1929 1961 given, it is returned when the attribute doesn't exist; without it, an
1930 1962 exception is raised in that case.
1931 1963
1932 1964 Note that alist can be given as a string, which will be automatically
1933 1965 split into a list on whitespace. If given as a list, it must be a list of
1934 1966 *strings* (the variable names themselves), not of variables."""
1935 1967
1936 1968 if type(alist) in StringTypes:
1937 1969 alist = alist.split()
1938 1970 if args:
1939 1971 if len(args)==1:
1940 1972 default = args[0]
1941 1973 return map(lambda attr: getattr(obj,attr,default),alist)
1942 1974 else:
1943 1975 raise ValueError,'getattr_list() takes only one optional argument'
1944 1976 else:
1945 1977 return map(lambda attr: getattr(obj,attr),alist)
1946 1978
1947 1979 #----------------------------------------------------------------------------
1948 1980 def map_method(method,object_list,*argseq,**kw):
1949 1981 """map_method(method,object_list,*args,**kw) -> list
1950 1982
1951 1983 Return a list of the results of applying the methods to the items of the
1952 1984 argument sequence(s). If more than one sequence is given, the method is
1953 1985 called with an argument list consisting of the corresponding item of each
1954 1986 sequence. All sequences must be of the same length.
1955 1987
1956 1988 Keyword arguments are passed verbatim to all objects called.
1957 1989
1958 1990 This is Python code, so it's not nearly as fast as the builtin map()."""
1959 1991
1960 1992 out_list = []
1961 1993 idx = 0
1962 1994 for object in object_list:
1963 1995 try:
1964 1996 handler = getattr(object, method)
1965 1997 except AttributeError:
1966 1998 out_list.append(None)
1967 1999 else:
1968 2000 if argseq:
1969 2001 args = map(lambda lst:lst[idx],argseq)
1970 2002 #print 'ob',object,'hand',handler,'ar',args # dbg
1971 2003 out_list.append(handler(*args,**kw))
1972 2004 else:
1973 2005 out_list.append(handler(**kw))
1974 2006 idx += 1
1975 2007 return out_list
1976 2008
1977 2009 #----------------------------------------------------------------------------
1978 2010 def get_class_members(cls):
1979 2011 ret = dir(cls)
1980 2012 if hasattr(cls,'__bases__'):
1981 2013 for base in cls.__bases__:
1982 2014 ret.extend(get_class_members(base))
1983 2015 return ret
1984 2016
1985 2017 #----------------------------------------------------------------------------
1986 2018 def dir2(obj):
1987 2019 """dir2(obj) -> list of strings
1988 2020
1989 2021 Extended version of the Python builtin dir(), which does a few extra
1990 2022 checks, and supports common objects with unusual internals that confuse
1991 2023 dir(), such as Traits and PyCrust.
1992 2024
1993 2025 This version is guaranteed to return only a list of true strings, whereas
1994 2026 dir() returns anything that objects inject into themselves, even if they
1995 2027 are later not really valid for attribute access (many extension libraries
1996 2028 have such bugs).
1997 2029 """
1998 2030
1999 2031 # Start building the attribute list via dir(), and then complete it
2000 2032 # with a few extra special-purpose calls.
2001 2033 words = dir(obj)
2002 2034
2003 2035 if hasattr(obj,'__class__'):
2004 2036 words.append('__class__')
2005 2037 words.extend(get_class_members(obj.__class__))
2006 2038 #if '__base__' in words: 1/0
2007 2039
2008 2040 # Some libraries (such as traits) may introduce duplicates, we want to
2009 2041 # track and clean this up if it happens
2010 2042 may_have_dupes = False
2011 2043
2012 2044 # this is the 'dir' function for objects with Enthought's traits
2013 2045 if hasattr(obj, 'trait_names'):
2014 2046 try:
2015 2047 words.extend(obj.trait_names())
2016 2048 may_have_dupes = True
2017 2049 except TypeError:
2018 2050 # This will happen if `obj` is a class and not an instance.
2019 2051 pass
2020 2052
2021 2053 # Support for PyCrust-style _getAttributeNames magic method.
2022 2054 if hasattr(obj, '_getAttributeNames'):
2023 2055 try:
2024 2056 words.extend(obj._getAttributeNames())
2025 2057 may_have_dupes = True
2026 2058 except TypeError:
2027 2059 # `obj` is a class and not an instance. Ignore
2028 2060 # this error.
2029 2061 pass
2030 2062
2031 2063 if may_have_dupes:
2032 2064 # eliminate possible duplicates, as some traits may also
2033 2065 # appear as normal attributes in the dir() call.
2034 2066 words = list(set(words))
2035 2067 words.sort()
2036 2068
2037 2069 # filter out non-string attributes which may be stuffed by dir() calls
2038 2070 # and poor coding in third-party modules
2039 2071 return [w for w in words if isinstance(w, basestring)]
2040 2072
2041 2073 #----------------------------------------------------------------------------
2042 2074 def import_fail_info(mod_name,fns=None):
2043 2075 """Inform load failure for a module."""
2044 2076
2045 2077 if fns == None:
2046 2078 warn("Loading of %s failed.\n" % (mod_name,))
2047 2079 else:
2048 2080 warn("Loading of %s from %s failed.\n" % (fns,mod_name))
2049 2081
2050 2082 #----------------------------------------------------------------------------
2051 2083 # Proposed popitem() extension, written as a method
2052 2084
2053 2085
2054 2086 class NotGiven: pass
2055 2087
2056 2088 def popkey(dct,key,default=NotGiven):
2057 2089 """Return dct[key] and delete dct[key].
2058 2090
2059 2091     If dct[key] doesn't exist, return default if one was given; otherwise
2060 2092     raise KeyError. """
2061 2093
2062 2094 try:
2063 2095 val = dct[key]
2064 2096 except KeyError:
2065 2097 if default is NotGiven:
2066 2098 raise
2067 2099 else:
2068 2100 return default
2069 2101 else:
2070 2102 del dct[key]
2071 2103 return val
2072 2104
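Editor's note: a hedged sketch of popkey() in use (the dictionary and keys below are illustrative only):

    d = {'color': 'red', 'size': 3}
    assert popkey(d, 'color') == 'red' and 'color' not in d
    assert popkey(d, 'missing', default=None) is None
    # popkey(d, 'missing') with no default would re-raise the KeyError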
2073 2105 def wrap_deprecated(func, suggest = '<nothing>'):
2074 2106 def newFunc(*args, **kwargs):
2075 2107 warnings.warn("Call to deprecated function %s, use %s instead" %
2076 2108 ( func.__name__, suggest),
2077 2109 category=DeprecationWarning,
2078 2110 stacklevel = 2)
2079 2111 return func(*args, **kwargs)
2080 2112 return newFunc
2081 2113
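Editor's note: a small usage sketch for wrap_deprecated(), assuming the module-level warnings import that genutils.py already performs; old_helper and the suggested replacement name are hypothetical:

    def old_helper(x):
        return x * 2

    helper = wrap_deprecated(old_helper, suggest='new_helper')
    # helper(21) still returns 42, but first emits a DeprecationWarning
    # pointing callers at 'new_helper'.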
2082 2114
2083 2115 def _num_cpus_unix():
2084 2116 """Return the number of active CPUs on a Unix system."""
2085 2117 return os.sysconf("SC_NPROCESSORS_ONLN")
2086 2118
2087 2119
2088 2120 def _num_cpus_darwin():
2089 2121 """Return the number of active CPUs on a Darwin system."""
2090 2122 p = subprocess.Popen(['sysctl','-n','hw.ncpu'],stdout=subprocess.PIPE)
2091 2123 return p.stdout.read()
2092 2124
2093 2125
2094 2126 def _num_cpus_windows():
2095 2127 """Return the number of active CPUs on a Windows system."""
2096 2128 return os.environ.get("NUMBER_OF_PROCESSORS")
2097 2129
2098 2130
2099 2131 def num_cpus():
2100 2132 """Return the effective number of CPUs in the system as an integer.
2101 2133
2102 2134 This cross-platform function makes an attempt at finding the total number of
2103 2135 available CPUs in the system, as returned by various underlying system and
2104 2136 python calls.
2105 2137
2106 2138 If it can't find a sensible answer, it returns 1 (though an error *may* make
2107 2139 it return a large positive number that's actually incorrect).
2108 2140 """
2109 2141
2110 2142 # Many thanks to the Parallel Python project (http://www.parallelpython.com)
2111 2143 # for the names of the keys we needed to look up for this function. This
2112 2144 # code was inspired by their equivalent function.
2113 2145
2114 2146 ncpufuncs = {'Linux':_num_cpus_unix,
2115 2147 'Darwin':_num_cpus_darwin,
2116 2148 'Windows':_num_cpus_windows,
2117 2149 # On Vista, python < 2.5.2 has a bug and returns 'Microsoft'
2118 2150 # See http://bugs.python.org/issue1082 for details.
2119 2151 'Microsoft':_num_cpus_windows,
2120 2152 }
2121 2153
2122 2154 ncpufunc = ncpufuncs.get(platform.system(),
2123 2155 # default to unix version (Solaris, AIX, etc)
2124 2156 _num_cpus_unix)
2125 2157
2126 2158 try:
2127 2159 ncpus = max(1,int(ncpufunc()))
2128 2160 except:
2129 2161 ncpus = 1
2130 2162 return ncpus
2131 2163
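Editor's note: a hedged example of how num_cpus() might be consumed (the worker-pool sizing policy is illustrative, not part of IPython):

    n_workers = max(1, num_cpus() - 1)   # leave one CPU free; num_cpus()
                                         # already falls back to 1 on failure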
2132 2164 #*************************** end of file <genutils.py> **********************
1 NO CONTENT: modified file chmod 100755 => 100644
@@ -1,272 +1,281
1 1 # -*- coding: utf-8 -*-
2 2
3 3 """ History related magics and functionality """
4 4
5 5 # Stdlib imports
6 6 import fnmatch
7 7 import os
8 8
9 9 # IPython imports
10 10 from IPython.genutils import Term, ask_yes_no
11 import IPython.ipapi
11 12
12 13 def magic_history(self, parameter_s = ''):
13 14 """Print input history (_i<n> variables), with most recent last.
14 15
15 16 %history -> print at most 40 inputs (some may be multi-line)\\
16 17 %history n -> print at most n inputs\\
17 18 %history n1 n2 -> print inputs between n1 and n2 (n2 not included)\\
18 19
19 20 Each input's number <n> is shown, and is accessible as the
20 21 automatically generated variable _i<n>. Multi-line statements are
21 22 printed starting at a new line for easy copy/paste.
22 23
23 24
24 25 Options:
25 26
26 27 -n: do NOT print line numbers. This is useful if you want to get a
27 28 printout of many lines which can be directly pasted into a text
28 29 editor.
29 30
30 31 This feature is only available if numbered prompts are in use.
31 32
32 33 -t: (default) print the 'translated' history, as IPython understands it.
33 34 IPython filters your input and converts it all into valid Python source
34 35 before executing it (things like magics or aliases are turned into
35 36 function calls, for example). With this option, you'll see the native
36 37 history instead of the user-entered version: '%cd /' will be seen as
37 38 '_ip.magic("%cd /")' instead of '%cd /'.
38 39
39 40 -r: print the 'raw' history, i.e. the actual commands you typed.
40 41
41 42 -g: treat the arg as a pattern to grep for in (full) history.
42 43 This includes the "shadow history" (almost all commands ever written).
43 44 Use '%hist -g' to show full shadow history (may be very long).
44 45     In shadow history, every index number starts with 0.
45 46
46 47 -f FILENAME: instead of printing the output to the screen, redirect it to
47 48 the given file. The file is always overwritten, though IPython asks for
48 49 confirmation first if it already exists.
49 50
50 51
51 52 """
52 53
53 54 ip = self.api
54 55 shell = self.shell
55 56 if not shell.outputcache.do_full_cache:
56 57 print 'This feature is only available if numbered prompts are in use.'
57 58 return
58 59 opts,args = self.parse_options(parameter_s,'gntsrf:',mode='list')
59 60
60 61 # Check if output to specific file was requested.
61 62 try:
62 63 outfname = opts['f']
63 64 except KeyError:
64 65 outfile = Term.cout
65 66 # We don't want to close stdout at the end!
66 67 close_at_end = False
67 68 else:
68 69 if os.path.exists(outfname):
69 70 ans = ask_yes_no("File %r exists. Overwrite?" % outfname)
70 71 if not ans:
71 72 print 'Aborting.'
72 73 return
73 74 else:
74 75 outfile = open(outfname,'w')
75 76 close_at_end = True
76 77
77 78
78 79 if opts.has_key('t'):
79 80 input_hist = shell.input_hist
80 81 elif opts.has_key('r'):
81 82 input_hist = shell.input_hist_raw
82 83 else:
83 84 input_hist = shell.input_hist
84 85
85 86
86 87 default_length = 40
87 88 pattern = None
88 89 if opts.has_key('g'):
89 90 init = 1
90 91 final = len(input_hist)
91 92 parts = parameter_s.split(None,1)
92 93 if len(parts) == 1:
93 94 parts += '*'
94 95 head, pattern = parts
95 96 pattern = "*" + pattern + "*"
96 97 elif len(args) == 0:
97 98 final = len(input_hist)
98 99 init = max(1,final-default_length)
99 100 elif len(args) == 1:
100 101 final = len(input_hist)
101 102 init = max(1,final-int(args[0]))
102 103 elif len(args) == 2:
103 104 init,final = map(int,args)
104 105 else:
105 106 warn('%hist takes 0, 1 or 2 arguments separated by spaces.')
106 107 print self.magic_hist.__doc__
107 108 return
108 109 width = len(str(final))
109 110 line_sep = ['','\n']
110 111 print_nums = not opts.has_key('n')
111 112
112 113 found = False
113 114 if pattern is not None:
114 115 sh = ip.IP.shadowhist.all()
115 116 for idx, s in sh:
116 117 if fnmatch.fnmatch(s, pattern):
117 118 print "0%d: %s" %(idx, s)
118 119 found = True
119 120
120 121 if found:
121 122 print "==="
122 123 print "shadow history ends, fetch by %rep <number> (must start with 0)"
123 124 print "=== start of normal history ==="
124 125
125 126 for in_num in range(init,final):
126 127 inline = input_hist[in_num]
127 128 if pattern is not None and not fnmatch.fnmatch(inline, pattern):
128 129 continue
129 130
130 131 multiline = int(inline.count('\n') > 1)
131 132 if print_nums:
132 133 print >> outfile, \
133 134 '%s:%s' % (str(in_num).ljust(width),line_sep[multiline]),
134 135 print >> outfile, inline,
135 136
136 137 if close_at_end:
137 138 outfile.close()
138 139
139 140
140 141
141 142 def magic_hist(self, parameter_s=''):
142 143 """Alternate name for %history."""
143 144 return self.magic_history(parameter_s)
144 145
145 146
146 147
147 148 def rep_f(self, arg):
148 149 r""" Repeat a command, or get command to input line for editing
149 150
150 151 - %rep (no arguments):
151 152
152 153     Place a string version of the last computation result (stored in the special '_'
153 154 variable) to the next input prompt. Allows you to create elaborate command
154 155 lines without using copy-paste::
155 156
156 157 $ l = ["hei", "vaan"]
157 158 $ "".join(l)
158 159 ==> heivaan
159 160 $ %rep
160 161 $ heivaan_ <== cursor blinking
161 162
162 163 %rep 45
163 164
164 165     Place history line 45 on the next input prompt. Use %hist to find out the
165 166 number.
166 167
167 168 %rep 1-4 6-7 3
168 169
169 170 Repeat the specified lines immediately. Input slice syntax is the same as
170 171 in %macro and %save.
171 172
172 173 %rep foo
173 174
174 175     Place the most recent line that has the substring "foo" on the next input.
175 176 (e.g. 'svn ci -m foobar').
176 177
177 178 """
178 179
179 180
180 181 opts,args = self.parse_options(arg,'',mode='list')
181 182 ip = self.api
182 183 if not args:
183 184 ip.set_next_input(str(ip.user_ns["_"]))
184 185 return
185 186
186 187 if len(args) == 1 and not '-' in args[0]:
187 188 arg = args[0]
188 189 if len(arg) > 1 and arg.startswith('0'):
189 190 # get from shadow hist
190 191 num = int(arg[1:])
191 192 line = self.shadowhist.get(num)
192 193 ip.set_next_input(str(line))
193 194 return
194 195 try:
195 196 num = int(args[0])
196 197 ip.set_next_input(str(ip.IP.input_hist_raw[num]).rstrip())
197 198 return
198 199 except ValueError:
199 200 pass
200 201
201 202 for h in reversed(self.shell.input_hist_raw):
202 203 if 'rep' in h:
203 204 continue
204 205 if fnmatch.fnmatch(h,'*' + arg + '*'):
205 206 ip.set_next_input(str(h).rstrip())
206 207 return
207 208
208 209
209 210 try:
210 211 lines = self.extract_input_slices(args, True)
211 212 print "lines",lines
212 213 ip.runlines(lines)
213 214 except ValueError:
214 215 print "Not found in recent history:", args
215 216
216 217
217 218
218 219 _sentinel = object()
219 220
220 221 class ShadowHist:
221 222 def __init__(self,db):
222 223 # cmd => idx mapping
223 224 self.curidx = 0
224 225 self.db = db
226 self.disabled = False
225 227
226 228 def inc_idx(self):
227 229 idx = self.db.get('shadowhist_idx', 1)
228 230 self.db['shadowhist_idx'] = idx + 1
229 231 return idx
230 232
231 233 def add(self, ent):
232 old = self.db.hget('shadowhist', ent, _sentinel)
233 if old is not _sentinel:
234 if self.disabled:
234 235 return
235 newidx = self.inc_idx()
236 #print "new",newidx # dbg
237 self.db.hset('shadowhist',ent, newidx)
236 try:
237 old = self.db.hget('shadowhist', ent, _sentinel)
238 if old is not _sentinel:
239 return
240 newidx = self.inc_idx()
241 #print "new",newidx # dbg
242 self.db.hset('shadowhist',ent, newidx)
243 except:
244 IPython.ipapi.get().IP.showtraceback()
245 print "WARNING: disabling shadow history"
246 self.disabled = True
238 247
239 248 def all(self):
240 249 d = self.db.hdict('shadowhist')
241 250 items = [(i,s) for (s,i) in d.items()]
242 251 items.sort()
243 252 return items
244 253
245 254 def get(self, idx):
246 255 all = self.all()
247 256
248 257 for k, v in all:
249 258 #print k,v
250 259 if k == idx:
251 260 return v
252 261
253 262 def test_shist():
254 263 from IPython.Extensions import pickleshare
255 264 db = pickleshare.PickleShareDB('~/shist')
256 265 s = ShadowHist(db)
257 266 s.add('hello')
258 267 s.add('world')
259 268 s.add('hello')
260 269 s.add('hello')
261 270 s.add('karhu')
262 271 print "all",s.all()
263 272 print s.get(2)
264 273
265 274 def init_ipython(ip):
266 275 ip.expose_magic("rep",rep_f)
267 276 ip.expose_magic("hist",magic_hist)
268 277 ip.expose_magic("history",magic_history)
269 278
270 279 import ipy_completers
271 280 ipy_completers.quick_completer('%hist' ,'-g -t -r -n')
272 281 #test_shist()
@@ -1,249 +1,252
1 1 """hooks for IPython.
2 2
3 3 In Python, it is possible to overwrite any method of any object if you really
4 4 want to. But IPython exposes a few 'hooks', methods which are _designed_ to
5 5 be overwritten by users for customization purposes. This module defines the
6 6 default versions of all such hooks, which get used by IPython if not
7 7 overridden by the user.
8 8
9 9 hooks are simple functions, but they should be declared with 'self' as their
10 10 first argument, because when activated they are registered into IPython as
11 11 instance methods. The self argument will be the IPython running instance
12 12 itself, so hooks have full access to the entire IPython object.
13 13
14 14 If you wish to define a new hook and activate it, you need to put the
15 15 necessary code into a python file which can be either imported or execfile()'d
16 16 from within your ipythonrc configuration.
17 17
18 18 For example, suppose that you have a module called 'myiphooks' in your
19 19 PYTHONPATH, which contains the following definition:
20 20
21 21 import os
22 22 import IPython.ipapi
23 23 ip = IPython.ipapi.get()
24 24
25 25 def calljed(self,filename, linenum):
26 26 "My editor hook calls the jed editor directly."
27 27 print "Calling my own editor, jed ..."
28 os.system('jed +%d %s' % (linenum,filename))
28 if os.system('jed +%d %s' % (linenum,filename)) != 0:
29     raise IPython.ipapi.TryNext()
29 30
30 31 ip.set_hook('editor', calljed)
31 32
32 33 You can then enable the functionality by doing 'import myiphooks'
33 34 somewhere in your configuration files or ipython command line.
34 35
35 36 $Id: hooks.py 2998 2008-01-31 10:06:04Z vivainio $"""
36 37
37 38 #*****************************************************************************
38 39 # Copyright (C) 2005 Fernando Perez. <fperez@colorado.edu>
39 40 #
40 41 # Distributed under the terms of the BSD License. The full license is in
41 42 # the file COPYING, distributed as part of this software.
42 43 #*****************************************************************************
43 44
44 45 from IPython import Release
45 46 from IPython import ipapi
46 47 __author__ = '%s <%s>' % Release.authors['Fernando']
47 48 __license__ = Release.license
48 49 __version__ = Release.version
49 50
50 51 import os,bisect
51 52 from genutils import Term,shell
52 53 from pprint import PrettyPrinter
53 54
54 55 # List here all the default hooks. For now it's just the editor functions
55 56 # but over time we'll move here all the public API for user-accessible things.
56 57 # vds: >>
57 58 __all__ = ['editor', 'fix_error_editor', 'synchronize_with_editor', 'result_display',
58 59 'input_prefilter', 'shutdown_hook', 'late_startup_hook',
59 60 'generate_prompt', 'generate_output_prompt','shell_hook',
60 61 'show_in_pager','pre_prompt_hook', 'pre_runcode_hook']
61 62 # vds: <<
62 63
63 64 pformat = PrettyPrinter().pformat
64 65
65 66 def editor(self,filename, linenum=None):
66 67 """Open the default editor at the given filename and linenumber.
67 68
68 69 This is IPython's default editor hook, you can use it as an example to
69 70 write your own modified one. To set your own editor function as the
70 71 new editor hook, call ip.set_hook('editor',yourfunc)."""
71 72
72 73 # IPython configures a default editor at startup by reading $EDITOR from
73 74 # the environment, and falling back on vi (unix) or notepad (win32).
74 75 editor = self.rc.editor
75 76
76 77 # marker for at which line to open the file (for existing objects)
77 78 if linenum is None or editor=='notepad':
78 79 linemark = ''
79 80 else:
80 81 linemark = '+%d' % int(linenum)
81 82
82 83 # Enclose in quotes if necessary and legal
83 84 if ' ' in editor and os.path.isfile(editor) and editor[0] != '"':
84 85 editor = '"%s"' % editor
85 86
86 87 # Call the actual editor
87 os.system('%s %s %s' % (editor,linemark,filename))
88 if os.system('%s %s %s' % (editor,linemark,filename)) != 0:
89 raise ipapi.TryNext()
88 90
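Editor's note: the change above makes the default editor hook raise TryNext when the editor command fails, instead of failing silently. A hedged sketch of a user hook written in the same spirit (the 'myeditor' command is hypothetical; os and ipapi are imported as at the top of this module):

    def careful_editor(self, filename, linenum=None):
        # Try a custom editor first; on failure, hand control to the next
        # hook in the chain by raising TryNext.
        if os.system('myeditor %s' % filename) != 0:
            raise ipapi.TryNext()
    # registered via: ip.set_hook('editor', careful_editor)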
89 91 import tempfile
90 92 def fix_error_editor(self,filename,linenum,column,msg):
91 93 """Open the editor at the given filename, linenumber, column and
92 94 show an error message. This is used for correcting syntax errors.
93 95 The current implementation only has special support for the VIM editor,
94 96 and falls back on the 'editor' hook if VIM is not used.
95 97
96 98     Call ip.set_hook('fix_error_editor',yourfunc) to use your own function,
97 99 """
98 100 def vim_quickfix_file():
99 101 t = tempfile.NamedTemporaryFile()
100 102 t.write('%s:%d:%d:%s\n' % (filename,linenum,column,msg))
101 103 t.flush()
102 104 return t
103 105 if os.path.basename(self.rc.editor) != 'vim':
104 106 self.hooks.editor(filename,linenum)
105 107 return
106 108 t = vim_quickfix_file()
107 109 try:
108 os.system('vim --cmd "set errorformat=%f:%l:%c:%m" -q ' + t.name)
110 if os.system('vim --cmd "set errorformat=%f:%l:%c:%m" -q ' + t.name):
111 raise ipapi.TryNext()
109 112 finally:
110 113 t.close()
111 114
112 115 # vds: >>
113 116 def synchronize_with_editor(self, filename, linenum, column):
114 117 pass
115 118 # vds: <<
116 119
117 120 class CommandChainDispatcher:
118 121 """ Dispatch calls to a chain of commands until some func can handle it
119 122
120 123 Usage: instantiate, execute "add" to add commands (with optional
121 124 priority), execute normally via f() calling mechanism.
122 125
123 126 """
124 127 def __init__(self,commands=None):
125 128 if commands is None:
126 129 self.chain = []
127 130 else:
128 131 self.chain = commands
129 132
130 133
131 134 def __call__(self,*args, **kw):
132 135 """ Command chain is called just like normal func.
133 136
134 137 This will call all funcs in chain with the same args as were given to this
135 138         function, and return the result of the first func that didn't raise
136 139 TryNext """
137 140
138 141 for prio,cmd in self.chain:
139 142 #print "prio",prio,"cmd",cmd #dbg
140 143 try:
141 144 ret = cmd(*args, **kw)
142 145 return ret
143 146 except ipapi.TryNext, exc:
144 147 if exc.args or exc.kwargs:
145 148 args = exc.args
146 149 kw = exc.kwargs
147 150 # if no function will accept it, raise TryNext up to the caller
148 151 raise ipapi.TryNext
149 152
150 153 def __str__(self):
151 154 return str(self.chain)
152 155
153 156 def add(self, func, priority=0):
154 157 """ Add a func to the cmd chain with given priority """
155 158 bisect.insort(self.chain,(priority,func))
156 159
157 160 def __iter__(self):
158 161 """ Return all objects in chain.
159 162
160 163 Handy if the objects are not callable.
161 164 """
162 165 return iter(self.chain)
163 166
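Editor's note: a minimal sketch of the dispatch order (handler names are illustrative; lower priority numbers run first, and raising TryNext passes control to the next handler in the chain):

    def primary(text):
        raise ipapi.TryNext()        # decline; let a later handler respond
    def fallback(text):
        return text.upper()

    chain = CommandChainDispatcher()
    chain.add(primary, priority=10)
    chain.add(fallback, priority=50)
    # chain('hi') -> 'HI' (primary ran first but deferred via TryNext)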
164 167 def result_display(self,arg):
165 168 """ Default display hook.
166 169
167 170 Called for displaying the result to the user.
168 171 """
169 172
170 173 if self.rc.pprint:
171 174 out = pformat(arg)
172 175 if '\n' in out:
173 176 # So that multi-line strings line up with the left column of
174 177 # the screen, instead of having the output prompt mess up
175 178 # their first line.
176 179 Term.cout.write('\n')
177 180 print >>Term.cout, out
178 181 else:
179 182 # By default, the interactive prompt uses repr() to display results,
180 183 # so we should honor this. Users who'd rather use a different
181 184 # mechanism can easily override this hook.
182 185 print >>Term.cout, repr(arg)
183 186 # the default display hook doesn't manipulate the value to put in history
184 187 return None
185 188
186 189 def input_prefilter(self,line):
187 190 """ Default input prefilter
188 191
189 192 This returns the line as unchanged, so that the interpreter
190 193 knows that nothing was done and proceeds with "classic" prefiltering
191 194 (%magics, !shell commands etc.).
192 195
193 196 Note that leading whitespace is not passed to this hook. Prefilter
194 197 can't alter indentation.
195 198
196 199 """
197 200 #print "attempt to rewrite",line #dbg
198 201 return line
199 202
200 203 def shutdown_hook(self):
201 204 """ default shutdown hook
202 205
203 206     Typically, shutdown hooks should raise TryNext so all shutdown ops are done
204 207 """
205 208
206 209 #print "default shutdown hook ok" # dbg
207 210 return
208 211
209 212 def late_startup_hook(self):
210 213 """ Executed after ipython has been constructed and configured
211 214
212 215 """
213 216 #print "default startup hook ok" # dbg
214 217
215 218 def generate_prompt(self, is_continuation):
216 219 """ calculate and return a string with the prompt to display """
217 220 ip = self.api
218 221 if is_continuation:
219 222 return str(ip.IP.outputcache.prompt2)
220 223 return str(ip.IP.outputcache.prompt1)
221 224
222 225 def generate_output_prompt(self):
223 226 ip = self.api
224 227 return str(ip.IP.outputcache.prompt_out)
225 228
226 229 def shell_hook(self,cmd):
227 230 """ Run system/shell command a'la os.system() """
228 231
229 232 shell(cmd, header=self.rc.system_header, verbose=self.rc.system_verbose)
230 233
231 234 def show_in_pager(self,s):
232 235 """ Run a string through pager """
233 236 # raising TryNext here will use the default paging functionality
234 237 raise ipapi.TryNext
235 238
236 239 def pre_prompt_hook(self):
237 240 """ Run before displaying the next prompt
238 241
239 242 Use this e.g. to display output from asynchronous operations (in order
240 243 to not mess up text entry)
241 244 """
242 245
243 246 return None
244 247
245 248 def pre_runcode_hook(self):
246 249 """ Executed before running the (prefiltered) code in IPython """
247 250 return None
248 251
249 252
@@ -1,2686 +1,2695
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 IPython -- An enhanced Interactive Python
4 4
5 5 Requires Python 2.3 or newer.
6 6
7 7 This file contains all the classes and helper functions specific to IPython.
8 8
9 9 """
10 10
11 11 #*****************************************************************************
12 12 # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> and
13 13 # Copyright (C) 2001-2006 Fernando Perez. <fperez@colorado.edu>
14 14 #
15 15 # Distributed under the terms of the BSD License. The full license is in
16 16 # the file COPYING, distributed as part of this software.
17 17 #
18 18 # Note: this code originally subclassed code.InteractiveConsole from the
19 19 # Python standard library. Over time, all of that class has been copied
20 20 # verbatim here for modifications which could not be accomplished by
21 21 # subclassing. At this point, there are no dependencies at all on the code
22 22 # module anymore (it is not even imported). The Python License (sec. 2)
23 23 # allows for this, but it's always nice to acknowledge credit where credit is
24 24 # due.
25 25 #*****************************************************************************
26 26
27 27 #****************************************************************************
28 28 # Modules and globals
29 29
30 30 from IPython import Release
31 31 __author__ = '%s <%s>\n%s <%s>' % \
32 32 ( Release.authors['Janko'] + Release.authors['Fernando'] )
33 33 __license__ = Release.license
34 34 __version__ = Release.version
35 35
36 36 # Python standard modules
37 37 import __main__
38 38 import __builtin__
39 39 import StringIO
40 40 import bdb
41 41 import cPickle as pickle
42 42 import codeop
43 43 import exceptions
44 44 import glob
45 45 import inspect
46 46 import keyword
47 47 import new
48 48 import os
49 49 import pydoc
50 50 import re
51 51 import shutil
52 52 import string
53 53 import sys
54 54 import tempfile
55 55 import traceback
56 56 import types
57 57 import warnings
58 58 warnings.filterwarnings('ignore', r'.*sets module*')
59 59 from sets import Set
60 60 from pprint import pprint, pformat
61 61
62 62 # IPython's own modules
63 63 #import IPython
64 64 from IPython import Debugger,OInspect,PyColorize,ultraTB
65 65 from IPython.ColorANSI import ColorScheme,ColorSchemeTable # too long names
66 66 from IPython.Extensions import pickleshare
67 67 from IPython.FakeModule import FakeModule
68 68 from IPython.Itpl import Itpl,itpl,printpl,ItplNS,itplns
69 69 from IPython.Logger import Logger
70 70 from IPython.Magic import Magic
71 71 from IPython.Prompts import CachedOutput
72 72 from IPython.ipstruct import Struct
73 73 from IPython.background_jobs import BackgroundJobManager
74 74 from IPython.usage import cmd_line_usage,interactive_usage
75 75 from IPython.genutils import *
76 76 from IPython.strdispatch import StrDispatch
77 77 import IPython.ipapi
78 78 import IPython.history
79 79 import IPython.prefilter as prefilter
80 80 import IPython.shadowns
81 81 # Globals
82 82
83 83 # store the builtin raw_input globally, and use this always, in case user code
84 84 # overwrites it (like wx.py.PyShell does)
85 85 raw_input_original = raw_input
86 86
87 87 # compiled regexps for autoindent management
88 88 dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass')
89 89
90 90
91 91 #****************************************************************************
92 92 # Some utility function definitions
93 93
94 94 ini_spaces_re = re.compile(r'^(\s+)')
95 95
96 96 def num_ini_spaces(strng):
97 97 """Return the number of initial spaces in a string"""
98 98
99 99 ini_spaces = ini_spaces_re.match(strng)
100 100 if ini_spaces:
101 101 return ini_spaces.end()
102 102 else:
103 103 return 0
104 104
105 105 def softspace(file, newvalue):
106 106 """Copied from code.py, to remove the dependency"""
107 107
108 108 oldvalue = 0
109 109 try:
110 110 oldvalue = file.softspace
111 111 except AttributeError:
112 112 pass
113 113 try:
114 114 file.softspace = newvalue
115 115 except (AttributeError, TypeError):
116 116 # "attribute-less object" or "read-only attributes"
117 117 pass
118 118 return oldvalue
119 119
120 120
121 121 #****************************************************************************
122 122 # Local use exceptions
123 123 class SpaceInInput(exceptions.Exception): pass
124 124
125 125
126 126 #****************************************************************************
127 127 # Local use classes
128 128 class Bunch: pass
129 129
130 130 class Undefined: pass
131 131
132 132 class Quitter(object):
133 133 """Simple class to handle exit, similar to Python 2.5's.
134 134
135 135 It handles exiting in an ipython-safe manner, which the one in Python 2.5
136 136 doesn't do (obviously, since it doesn't know about ipython)."""
137 137
138 138 def __init__(self,shell,name):
139 139 self.shell = shell
140 140 self.name = name
141 141
142 142 def __repr__(self):
143 143 return 'Type %s() to exit.' % self.name
144 144 __str__ = __repr__
145 145
146 146 def __call__(self):
147 147 self.shell.exit()
148 148
149 149 class InputList(list):
150 150 """Class to store user input.
151 151
152 152 It's basically a list, but slices return a string instead of a list, thus
153 153 allowing things like (assuming 'In' is an instance):
154 154
155 155 exec In[4:7]
156 156
157 157 or
158 158
159 159 exec In[5:9] + In[14] + In[21:25]"""
160 160
161 161 def __getslice__(self,i,j):
162 162 return ''.join(list.__getslice__(self,i,j))
163 163
164 164 class SyntaxTB(ultraTB.ListTB):
165 165 """Extension which holds some state: the last exception value"""
166 166
167 167 def __init__(self,color_scheme = 'NoColor'):
168 168 ultraTB.ListTB.__init__(self,color_scheme)
169 169 self.last_syntax_error = None
170 170
171 171 def __call__(self, etype, value, elist):
172 172 self.last_syntax_error = value
173 173 ultraTB.ListTB.__call__(self,etype,value,elist)
174 174
175 175 def clear_err_state(self):
176 176 """Return the current error state and clear it"""
177 177 e = self.last_syntax_error
178 178 self.last_syntax_error = None
179 179 return e
180 180
181 181 #****************************************************************************
182 182 # Main IPython class
183 183
184 184 # FIXME: the Magic class is a mixin for now, and will unfortunately remain so
185 185 # until a full rewrite is made. I've cleaned all cross-class uses of
186 186 # attributes and methods, but too much user code out there relies on the
187 187 # equality %foo == __IP.magic_foo, so I can't actually remove the mixin usage.
188 188 #
189 189 # But at least now, all the pieces have been separated and we could, in
190 190 # principle, stop using the mixin. This will ease the transition to the
191 191 # chainsaw branch.
192 192
193 193 # For reference, the following is the list of 'self.foo' uses in the Magic
194 194 # class as of 2005-12-28. These are names we CAN'T use in the main ipython
195 195 # class, to prevent clashes.
196 196
197 197 # ['self.__class__', 'self.__dict__', 'self._inspect', 'self._ofind',
198 198 # 'self.arg_err', 'self.extract_input', 'self.format_', 'self.lsmagic',
199 199 # 'self.magic_', 'self.options_table', 'self.parse', 'self.shell',
200 200 # 'self.value']
201 201
202 202 class InteractiveShell(object,Magic):
203 203 """An enhanced console for Python."""
204 204
205 205 # class attribute to indicate whether the class supports threads or not.
206 206 # Subclasses with thread support should override this as needed.
207 207 isthreaded = False
208 208
209 209 def __init__(self,name,usage=None,rc=Struct(opts=None,args=None),
210 210 user_ns=None,user_global_ns=None,banner2='',
211 211 custom_exceptions=((),None),embedded=False):
212 212
213 213 # log system
214 214 self.logger = Logger(self,logfname='ipython_log.py',logmode='rotate')
215 215
216 216 # Job manager (for jobs run as background threads)
217 217 self.jobs = BackgroundJobManager()
218 218
219 219 # Store the actual shell's name
220 220 self.name = name
221 221 self.more = False
222 222
223 223 # We need to know whether the instance is meant for embedding, since
224 224 # global/local namespaces need to be handled differently in that case
225 225 self.embedded = embedded
226 226 if embedded:
227 227 # Control variable so users can, from within the embedded instance,
228 228 # permanently deactivate it.
229 229 self.embedded_active = True
230 230
231 231 # command compiler
232 232 self.compile = codeop.CommandCompiler()
233 233
234 234 # User input buffer
235 235 self.buffer = []
236 236
237 237 # Default name given in compilation of code
238 238 self.filename = '<ipython console>'
239 239
240 240 # Install our own quitter instead of the builtins. For python2.3-2.4,
241 241 # this brings in behavior like 2.5, and for 2.5 it's identical.
242 242 __builtin__.exit = Quitter(self,'exit')
243 243 __builtin__.quit = Quitter(self,'quit')
244 244
245 245 # Make an empty namespace, which extension writers can rely on both
246 246 # existing and NEVER being used by ipython itself. This gives them a
247 247 # convenient location for storing additional information and state
248 248 # their extensions may require, without fear of collisions with other
249 249 # ipython names that may develop later.
250 250 self.meta = Struct()
251 251
252 252 # Create the namespace where the user will operate. user_ns is
253 253 # normally the only one used, and it is passed to the exec calls as
254 254 # the locals argument. But we do carry a user_global_ns namespace
255 255         # given as the exec 'globals' argument. This is useful in embedding
256 256 # situations where the ipython shell opens in a context where the
257 257 # distinction between locals and globals is meaningful. For
258 258 # non-embedded contexts, it is just the same object as the user_ns dict.
259 259
260 260 # FIXME. For some strange reason, __builtins__ is showing up at user
261 261 # level as a dict instead of a module. This is a manual fix, but I
262 262 # should really track down where the problem is coming from. Alex
263 263 # Schmolck reported this problem first.
264 264
265 265 # A useful post by Alex Martelli on this topic:
266 266 # Re: inconsistent value from __builtins__
267 267 # Von: Alex Martelli <aleaxit@yahoo.com>
268 268 # Datum: Freitag 01 Oktober 2004 04:45:34 nachmittags/abends
269 269 # Gruppen: comp.lang.python
270 270
271 271 # Michael Hohn <hohn@hooknose.lbl.gov> wrote:
272 272 # > >>> print type(builtin_check.get_global_binding('__builtins__'))
273 273 # > <type 'dict'>
274 274 # > >>> print type(__builtins__)
275 275 # > <type 'module'>
276 276 # > Is this difference in return value intentional?
277 277
278 278 # Well, it's documented that '__builtins__' can be either a dictionary
279 279 # or a module, and it's been that way for a long time. Whether it's
280 280 # intentional (or sensible), I don't know. In any case, the idea is
281 281 # that if you need to access the built-in namespace directly, you
282 282 # should start with "import __builtin__" (note, no 's') which will
283 283 # definitely give you a module. Yeah, it's somewhat confusing:-(.
284 284
285 285 # These routines return properly built dicts as needed by the rest of
286 286 # the code, and can also be used by extension writers to generate
287 287 # properly initialized namespaces.
288 288 user_ns, user_global_ns = IPython.ipapi.make_user_namespaces(user_ns,
289 289 user_global_ns)
290 290
291 291 # Assign namespaces
292 292 # This is the namespace where all normal user variables live
293 293 self.user_ns = user_ns
294 294 self.user_global_ns = user_global_ns
295 295 # A namespace to keep track of internal data structures to prevent
296 296 # them from cluttering user-visible stuff. Will be updated later
297 297 self.internal_ns = {}
298 298
299 299 # Namespace of system aliases. Each entry in the alias
300 300 # table must be a 2-tuple of the form (N,name), where N is the number
301 301 # of positional arguments of the alias.
302 302 self.alias_table = {}
303 303
304 304 # A table holding all the namespaces IPython deals with, so that
305 305 # introspection facilities can search easily.
306 306 self.ns_table = {'user':user_ns,
307 307 'user_global':user_global_ns,
308 308 'alias':self.alias_table,
309 309 'internal':self.internal_ns,
310 310 'builtin':__builtin__.__dict__
311 311 }
312 312 # The user namespace MUST have a pointer to the shell itself.
313 313 self.user_ns[name] = self
314 314
315 315 # We need to insert into sys.modules something that looks like a
316 316 # module but which accesses the IPython namespace, for shelve and
317 317 # pickle to work interactively. Normally they rely on getting
318 318 # everything out of __main__, but for embedding purposes each IPython
319 319 # instance has its own private namespace, so we can't go shoving
320 320 # everything into __main__.
321 321
322 322 # note, however, that we should only do this for non-embedded
323 323 # ipythons, which really mimic the __main__.__dict__ with their own
324 324 # namespace. Embedded instances, on the other hand, should not do
325 325 # this because they need to manage the user local/global namespaces
326 326 # only, but they live within a 'normal' __main__ (meaning, they
327 327 # shouldn't overtake the execution environment of the script they're
328 328 # embedded in).
329 329
330 330 if not embedded:
331 331 try:
332 332 main_name = self.user_ns['__name__']
333 333 except KeyError:
334 334 raise KeyError,'user_ns dictionary MUST have a "__name__" key'
335 335 else:
336 336 #print "pickle hack in place" # dbg
337 337 #print 'main_name:',main_name # dbg
338 338 sys.modules[main_name] = FakeModule(self.user_ns)
339 339
340 340 # Now that FakeModule produces a real module, we've run into a nasty
341 341 # problem: after script execution (via %run), the module where the user
342 342 # code ran is deleted. Now that this object is a true module (needed
343 343         # so doctest and other tools work correctly), the Python module
344 344 # teardown mechanism runs over it, and sets to None every variable
345 345 # present in that module. This means that later calls to functions
346 346 # defined in the script (which have become interactively visible after
347 347 # script exit) fail, because they hold references to objects that have
348 348         # been overwritten with None. The only solution I see right now is
349 349 # to protect every FakeModule used by %run by holding an internal
350 350 # reference to it. This private list will be used for that. The
351 351 # %reset command will flush it as well.
352 352 self._user_main_modules = []
353 353
354 354 # List of input with multi-line handling.
355 355 # Fill its zero entry, user counter starts at 1
356 356 self.input_hist = InputList(['\n'])
357 357 # This one will hold the 'raw' input history, without any
358 358 # pre-processing. This will allow users to retrieve the input just as
359 359 # it was exactly typed in by the user, with %hist -r.
360 360 self.input_hist_raw = InputList(['\n'])
361 361
362 362 # list of visited directories
363 363 try:
364 364 self.dir_hist = [os.getcwd()]
365 365 except OSError:
366 366 self.dir_hist = []
367 367
368 368 # dict of output history
369 369 self.output_hist = {}
370 370
371 371 # Get system encoding at startup time. Certain terminals (like Emacs
372 372         # under Win32) have it set to None, and we need to have a known valid
373 373 # encoding to use in the raw_input() method
374 374 try:
375 375 self.stdin_encoding = sys.stdin.encoding or 'ascii'
376 376 except AttributeError:
377 377 self.stdin_encoding = 'ascii'
378 378
379 379 # dict of things NOT to alias (keywords, builtins and some magics)
380 380 no_alias = {}
381 381 no_alias_magics = ['cd','popd','pushd','dhist','alias','unalias']
382 382 for key in keyword.kwlist + no_alias_magics:
383 383 no_alias[key] = 1
384 384 no_alias.update(__builtin__.__dict__)
385 385 self.no_alias = no_alias
386 386
387 387 # make global variables for user access to these
388 388 self.user_ns['_ih'] = self.input_hist
389 389 self.user_ns['_oh'] = self.output_hist
390 390 self.user_ns['_dh'] = self.dir_hist
391 391
392 392 # user aliases to input and output histories
393 393 self.user_ns['In'] = self.input_hist
394 394 self.user_ns['Out'] = self.output_hist
395 395
396 396 self.user_ns['_sh'] = IPython.shadowns
397 397 # Object variable to store code object waiting execution. This is
398 398 # used mainly by the multithreaded shells, but it can come in handy in
399 399 # other situations. No need to use a Queue here, since it's a single
400 400 # item which gets cleared once run.
401 401 self.code_to_run = None
402 402
403 403 # escapes for automatic behavior on the command line
404 404 self.ESC_SHELL = '!'
405 405 self.ESC_SH_CAP = '!!'
406 406 self.ESC_HELP = '?'
407 407 self.ESC_MAGIC = '%'
408 408 self.ESC_QUOTE = ','
409 409 self.ESC_QUOTE2 = ';'
410 410 self.ESC_PAREN = '/'
411 411
412 412 # And their associated handlers
413 413 self.esc_handlers = {self.ESC_PAREN : self.handle_auto,
414 414 self.ESC_QUOTE : self.handle_auto,
415 415 self.ESC_QUOTE2 : self.handle_auto,
416 416 self.ESC_MAGIC : self.handle_magic,
417 417 self.ESC_HELP : self.handle_help,
418 418 self.ESC_SHELL : self.handle_shell_escape,
419 419 self.ESC_SH_CAP : self.handle_shell_escape,
420 420 }
421 421
422 422 # class initializations
423 423 Magic.__init__(self,self)
424 424
425 425 # Python source parser/formatter for syntax highlighting
426 426 pyformat = PyColorize.Parser().format
427 427 self.pycolorize = lambda src: pyformat(src,'str',self.rc['colors'])
428 428
429 429 # hooks holds pointers used for user-side customizations
430 430 self.hooks = Struct()
431 431
432 432 self.strdispatchers = {}
433 433
434 434 # Set all default hooks, defined in the IPython.hooks module.
435 435 hooks = IPython.hooks
436 436 for hook_name in hooks.__all__:
437 437 # default hooks have priority 100, i.e. low; user hooks should have
438 438 # 0-100 priority
439 439 self.set_hook(hook_name,getattr(hooks,hook_name), 100)
440 440 #print "bound hook",hook_name
441 441
442 442 # Flag to mark unconditional exit
443 443 self.exit_now = False
444 444
445 445 self.usage_min = """\
446 446 An enhanced console for Python.
447 447 Some of its features are:
448 448 - Readline support if the readline library is present.
449 449 - Tab completion in the local namespace.
450 450 - Logging of input, see command-line options.
451 451 - System shell escape via ! , eg !ls.
452 452 - Magic commands, starting with a % (like %ls, %pwd, %cd, etc.)
453 453 - Keeps track of locally defined variables via %who, %whos.
454 454 - Show object information with a ? eg ?x or x? (use ?? for more info).
455 455 """
456 456 if usage: self.usage = usage
457 457 else: self.usage = self.usage_min
458 458
459 459 # Storage
460 460 self.rc = rc # This will hold all configuration information
461 461 self.pager = 'less'
462 462 # temporary files used for various purposes. Deleted at exit.
463 463 self.tempfiles = []
464 464
465 465 # Keep track of readline usage (later set by init_readline)
466 466 self.has_readline = False
467 467
468 468 # template for logfile headers. It gets resolved at runtime by the
469 469 # logstart method.
470 470 self.loghead_tpl = \
471 471 """#log# Automatic Logger file. *** THIS MUST BE THE FIRST LINE ***
472 472 #log# DO NOT CHANGE THIS LINE OR THE TWO BELOW
473 473 #log# opts = %s
474 474 #log# args = %s
475 475 #log# It is safe to make manual edits below here.
476 476 #log#-----------------------------------------------------------------------
477 477 """
478 478 # for pushd/popd management
479 479 try:
480 480 self.home_dir = get_home_dir()
481 481 except HomeDirError,msg:
482 482 fatal(msg)
483 483
484 484 self.dir_stack = []
485 485
486 486 # Functions to call the underlying shell.
487 487
488 488 # The first is similar to os.system, but it doesn't return a value,
489 489 # and it allows interpolation of variables in the user's namespace.
490 490 self.system = lambda cmd: \
491 491 self.hooks.shell_hook(self.var_expand(cmd,depth=2))
492 492
493 493 # These are for getoutput and getoutputerror:
494 494 self.getoutput = lambda cmd: \
495 495 getoutput(self.var_expand(cmd,depth=2),
496 496 header=self.rc.system_header,
497 497 verbose=self.rc.system_verbose)
498 498
499 499 self.getoutputerror = lambda cmd: \
500 500 getoutputerror(self.var_expand(cmd,depth=2),
501 501 header=self.rc.system_header,
502 502 verbose=self.rc.system_verbose)
503 503
504 504
505 505 # keep track of where we started running (mainly for crash post-mortem)
506 506 self.starting_dir = os.getcwd()
507 507
508 508 # Various switches which can be set
509 509 self.CACHELENGTH = 5000 # this is cheap, it's just text
510 510 self.BANNER = "Python %(version)s on %(platform)s\n" % sys.__dict__
511 511 self.banner2 = banner2
512 512
513 513 # TraceBack handlers:
514 514
515 515 # Syntax error handler.
516 516 self.SyntaxTB = SyntaxTB(color_scheme='NoColor')
517 517
518 518 # The interactive one is initialized with an offset, meaning we always
519 519 # want to remove the topmost item in the traceback, which is our own
520 520 # internal code. Valid modes: ['Plain','Context','Verbose']
521 521 self.InteractiveTB = ultraTB.AutoFormattedTB(mode = 'Plain',
522 522 color_scheme='NoColor',
523 523 tb_offset = 1)
524 524
525 525 # IPython itself shouldn't crash. This will produce a detailed
526 526 # post-mortem if it does. But we only install the crash handler for
527 527 # non-threaded shells, the threaded ones use a normal verbose reporter
528 528 # and lose the crash handler. This is because exceptions in the main
529 529 # thread (such as in GUI code) propagate directly to sys.excepthook,
530 530 # and there's no point in printing crash dumps for every user exception.
531 531 if self.isthreaded:
532 532 ipCrashHandler = ultraTB.FormattedTB()
533 533 else:
534 534 from IPython import CrashHandler
535 535 ipCrashHandler = CrashHandler.IPythonCrashHandler(self)
536 536 self.set_crash_handler(ipCrashHandler)
537 537
538 538 # and add any custom exception handlers the user may have specified
539 539 self.set_custom_exc(*custom_exceptions)
540 540
541 541 # indentation management
542 542 self.autoindent = False
543 543 self.indent_current_nsp = 0
544 544
545 545 # Make some aliases automatically
546 546 # Prepare list of shell aliases to auto-define
547 547 if os.name == 'posix':
548 548 auto_alias = ('mkdir mkdir', 'rmdir rmdir',
549 549 'mv mv -i','rm rm -i','cp cp -i',
550 550 'cat cat','less less','clear clear',
551 551 # a better ls
552 552 'ls ls -F',
553 553 # long ls
554 554 'll ls -lF')
555 555 # Extra ls aliases with color, which need special treatment on BSD
556 556 # variants
557 557 ls_extra = ( # color ls
558 558 'lc ls -F -o --color',
559 559 # ls normal files only
560 560 'lf ls -F -o --color %l | grep ^-',
561 561 # ls symbolic links
562 562 'lk ls -F -o --color %l | grep ^l',
563 563 # directories or links to directories,
564 564 'ldir ls -F -o --color %l | grep /$',
565 565 # things which are executable
566 566 'lx ls -F -o --color %l | grep ^-..x',
567 567 )
568 568 # The BSDs don't ship GNU ls, so they don't understand the
569 569 # --color switch out of the box
570 570 if 'bsd' in sys.platform:
571 571 ls_extra = ( # ls normal files only
572 572 'lf ls -lF | grep ^-',
573 573 # ls symbolic links
574 574 'lk ls -lF | grep ^l',
575 575 # directories or links to directories,
576 576 'ldir ls -lF | grep /$',
577 577 # things which are executable
578 578 'lx ls -lF | grep ^-..x',
579 579 )
580 580 auto_alias = auto_alias + ls_extra
581 581 elif os.name in ['nt','dos']:
582 582 auto_alias = ('ls dir /on',
583 583 'ddir dir /ad /on', 'ldir dir /ad /on',
584 584 'mkdir mkdir','rmdir rmdir','echo echo',
585 585 'ren ren','cls cls','copy copy')
586 586 else:
587 587 auto_alias = ()
588 588 self.auto_alias = [s.split(None,1) for s in auto_alias]
589 589
590 590
591 591 # Produce a public API instance
592 592 self.api = IPython.ipapi.IPApi(self)
593 593
594 594 # Call the actual (public) initializer
595 595 self.init_auto_alias()
596 596
597 597 # track which builtins we add, so we can clean up later
598 598 self.builtins_added = {}
599 599 # This method will add the necessary builtins for operation, but
600 600 # tracking what it did via the builtins_added dict.
601 601
602 602 #TODO: remove this, redundant
603 603 self.add_builtins()
604 604
605 605
606 606
607 607
608 608 # end __init__
609 609
610 610 def var_expand(self,cmd,depth=0):
611 611 """Expand python variables in a string.
612 612
613 613 The depth argument indicates how many frames above the caller should
614 614 be walked to look for the local namespace where to expand variables.
615 615
616 616 The global namespace for expansion is always the user's interactive
617 617 namespace.
618 618 """
619 619
620 620 return str(ItplNS(cmd,
621 621 self.user_ns, # globals
622 622 # Skip our own frame in searching for locals:
623 623 sys._getframe(depth+1).f_locals # locals
624 624 ))
625 625
626 626 def pre_config_initialization(self):
627 627 """Pre-configuration init method
628 628
629 629 This is called before the configuration files are processed to
630 630 prepare the services the config files might need.
631 631
632 632 self.rc already has reasonable default values at this point.
633 633 """
634 634 rc = self.rc
635 635 try:
636 636 self.db = pickleshare.PickleShareDB(rc.ipythondir + "/db")
637 637 except exceptions.UnicodeDecodeError:
638 638 print "Your ipythondir can't be decoded to unicode!"
639 639 print "Please set HOME environment variable to something that"
640 640 print r"only has ASCII characters, e.g. c:\home"
641 641 print "Now it is",rc.ipythondir
642 642 sys.exit()
643 643 self.shadowhist = IPython.history.ShadowHist(self.db)
644 644
645 645
646 646 def post_config_initialization(self):
647 647 """Post configuration init method
648 648
649 649 This is called after the configuration files have been processed to
650 650 'finalize' the initialization."""
651 651
652 652 rc = self.rc
653 653
654 654 # Object inspector
655 655 self.inspector = OInspect.Inspector(OInspect.InspectColors,
656 656 PyColorize.ANSICodeColors,
657 657 'NoColor',
658 658 rc.object_info_string_level)
659 659
660 660 self.rl_next_input = None
661 661 self.rl_do_indent = False
662 662 # Load readline proper
663 663 if rc.readline:
664 664 self.init_readline()
665 665
666 666
667 667 # local shortcut, this is used a LOT
668 668 self.log = self.logger.log
669 669
670 670 # Initialize cache, set in/out prompts and printing system
671 671 self.outputcache = CachedOutput(self,
672 672 rc.cache_size,
673 673 rc.pprint,
674 674 input_sep = rc.separate_in,
675 675 output_sep = rc.separate_out,
676 676 output_sep2 = rc.separate_out2,
677 677 ps1 = rc.prompt_in1,
678 678 ps2 = rc.prompt_in2,
679 679 ps_out = rc.prompt_out,
680 680 pad_left = rc.prompts_pad_left)
681 681
682 682 # user may have over-ridden the default print hook:
683 683 try:
684 684 self.outputcache.__class__.display = self.hooks.display
685 685 except AttributeError:
686 686 pass
687 687
688 688 # I don't like assigning globally to sys, because it means when
689 689 # embedding instances, each embedded instance overrides the previous
690 690 # choice. But sys.displayhook seems to be called internally by exec,
691 691 # so I don't see a way around it. We first save the original and then
692 692 # overwrite it.
693 693 self.sys_displayhook = sys.displayhook
694 694 sys.displayhook = self.outputcache
695 695
696 696 # Do a proper resetting of doctest, including the necessary displayhook
697 697 # monkeypatching
698 698 try:
699 699 doctest_reload()
700 700 except ImportError:
701 701 warn("doctest module does not exist.")
702 702
703 703 # Set user colors (don't do it in the constructor above so that it
704 704 # doesn't crash if colors option is invalid)
705 705 self.magic_colors(rc.colors)
706 706
707 707 # Set calling of pdb on exceptions
708 708 self.call_pdb = rc.pdb
709 709
710 710 # Load user aliases
711 711 for alias in rc.alias:
712 712 self.magic_alias(alias)
713 713
714 714 self.hooks.late_startup_hook()
715 715
716 716 for cmd in self.rc.autoexec:
717 717 #print "autoexec>",cmd #dbg
718 718 self.api.runlines(cmd)
719 719
720 720 batchrun = False
721 721 for batchfile in [path(arg) for arg in self.rc.args
722 722 if arg.lower().endswith('.ipy')]:
723 723 if not batchfile.isfile():
724 724 print "No such batch file:", batchfile
725 725 continue
726 726 self.api.runlines(batchfile.text())
727 727 batchrun = True
728 728 # without -i option, exit after running the batch file
729 729 if batchrun and not self.rc.interact:
730 730 self.ask_exit()
731 731
732 732 def add_builtins(self):
733 733 """Store ipython references into the builtin namespace.
734 734
735 735 Some parts of ipython operate via builtins injected here, which hold a
736 736 reference to IPython itself."""
737 737
738 738 # TODO: deprecate all of these, they are unsafe
739 739 builtins_new = dict(__IPYTHON__ = self,
740 740 ip_set_hook = self.set_hook,
741 741 jobs = self.jobs,
742 742 ipmagic = wrap_deprecated(self.ipmagic,'_ip.magic()'),
743 743 ipalias = wrap_deprecated(self.ipalias),
744 744 ipsystem = wrap_deprecated(self.ipsystem,'_ip.system()'),
745 745 #_ip = self.api
746 746 )
747 747 for biname,bival in builtins_new.items():
748 748 try:
749 749                 # store the original value so we can restore it
750 750 self.builtins_added[biname] = __builtin__.__dict__[biname]
751 751 except KeyError:
752 752 # or mark that it wasn't defined, and we'll just delete it at
753 753 # cleanup
754 754 self.builtins_added[biname] = Undefined
755 755 __builtin__.__dict__[biname] = bival
756 756
757 757 # Keep in the builtins a flag for when IPython is active. We set it
758 758 # with setdefault so that multiple nested IPythons don't clobber one
759 759 # another. Each will increase its value by one upon being activated,
760 760 # which also gives us a way to determine the nesting level.
761 761 __builtin__.__dict__.setdefault('__IPYTHON__active',0)
762 762
763 763 def clean_builtins(self):
764 764 """Remove any builtins which might have been added by add_builtins, or
765 765 restore overwritten ones to their previous values."""
766 766 for biname,bival in self.builtins_added.items():
767 767 if bival is Undefined:
768 768 del __builtin__.__dict__[biname]
769 769 else:
770 770 __builtin__.__dict__[biname] = bival
771 771 self.builtins_added.clear()
772 772
773 773 def set_hook(self,name,hook, priority = 50, str_key = None, re_key = None):
774 774 """set_hook(name,hook) -> sets an internal IPython hook.
775 775
776 776 IPython exposes some of its internal API as user-modifiable hooks. By
777 777 adding your function to one of these hooks, you can modify IPython's
778 778 behavior to call at runtime your own routines."""
779 779
780 780 # At some point in the future, this should validate the hook before it
781 781 # accepts it. Probably at least check that the hook takes the number
782 782 # of args it's supposed to.
783 783
784 784 f = new.instancemethod(hook,self,self.__class__)
785 785
786 786 # check if the hook is for strdispatcher first
787 787 if str_key is not None:
788 788 sdp = self.strdispatchers.get(name, StrDispatch())
789 789 sdp.add_s(str_key, f, priority )
790 790 self.strdispatchers[name] = sdp
791 791 return
792 792 if re_key is not None:
793 793 sdp = self.strdispatchers.get(name, StrDispatch())
794 794 sdp.add_re(re.compile(re_key), f, priority )
795 795 self.strdispatchers[name] = sdp
796 796 return
797 797
798 798 dp = getattr(self.hooks, name, None)
799 799 if name not in IPython.hooks.__all__:
800 800 print "Warning! Hook '%s' is not one of %s" % (name, IPython.hooks.__all__ )
801 801 if not dp:
802 802 dp = IPython.hooks.CommandChainDispatcher()
803 803
804 804 try:
805 805 dp.add(f,priority)
806 806 except AttributeError:
807 807 # it was not commandchain, plain old func - replace
808 808 dp = f
809 809
810 810 setattr(self.hooks,name, dp)
811 811
812 812
813 813 #setattr(self.hooks,name,new.instancemethod(hook,self,self.__class__))
814 814
815 815 def set_crash_handler(self,crashHandler):
816 816 """Set the IPython crash handler.
817 817
818 818 This must be a callable with a signature suitable for use as
819 819 sys.excepthook."""
820 820
821 821 # Install the given crash handler as the Python exception hook
822 822 sys.excepthook = crashHandler
823 823
824 824 # The instance will store a pointer to this, so that runtime code
825 825 # (such as magics) can access it. This is because during the
826 826 # read-eval loop, it gets temporarily overwritten (to deal with GUI
827 827 # frameworks).
828 828 self.sys_excepthook = sys.excepthook
829 829
830 830
831 831 def set_custom_exc(self,exc_tuple,handler):
832 832 """set_custom_exc(exc_tuple,handler)
833 833
834 834 Set a custom exception handler, which will be called if any of the
835 835 exceptions in exc_tuple occur in the mainloop (specifically, in the
836 836         runcode() method).
837 837
838 838 Inputs:
839 839
840 840 - exc_tuple: a *tuple* of valid exceptions to call the defined
841 841 handler for. It is very important that you use a tuple, and NOT A
842 842 LIST here, because of the way Python's except statement works. If
843 843 you only want to trap a single exception, use a singleton tuple:
844 844
845 845 exc_tuple == (MyCustomException,)
846 846
847 847 - handler: this must be defined as a function with the following
848 848 basic interface: def my_handler(self,etype,value,tb).
849 849
850 850 This will be made into an instance method (via new.instancemethod)
851 851 of IPython itself, and it will be called if any of the exceptions
852 852 listed in the exc_tuple are caught. If the handler is None, an
853 853 internal basic one is used, which just prints basic info.
854 854
855 855         WARNING: by putting your own exception handler into IPython's main
856 856 execution loop, you run a very good chance of nasty crashes. This
857 857 facility should only be used if you really know what you are doing."""
858 858
859 859 assert type(exc_tuple)==type(()) , \
860 860 "The custom exceptions must be given AS A TUPLE."
861 861
862 862 def dummy_handler(self,etype,value,tb):
863 863 print '*** Simple custom exception handler ***'
864 864 print 'Exception type :',etype
865 865 print 'Exception value:',value
866 866 print 'Traceback :',tb
867 867 print 'Source code :','\n'.join(self.buffer)
868 868
869 869 if handler is None: handler = dummy_handler
870 870
871 871 self.CustomTB = new.instancemethod(handler,self,self.__class__)
872 872 self.custom_exceptions = exc_tuple
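Editor's note: a hedged sketch of the handler interface described in the docstring above (the handler name and trapped exception are illustrative; 'shell' stands for an InteractiveShell instance):

    def quiet_div_handler(self, etype, value, tb):
        # minimal custom handler: just report, don't print a full traceback
        print 'Suppressed %s: %s' % (etype.__name__, value)

    # shell.set_custom_exc((ZeroDivisionError,), quiet_div_handler)  # note the singleton tuple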
873 873
874 874 def set_custom_completer(self,completer,pos=0):
875 875 """set_custom_completer(completer,pos=0)
876 876
877 877 Adds a new custom completer function.
878 878
879 879 The position argument (defaults to 0) is the index in the completers
880 880 list where you want the completer to be inserted."""
881 881
882 882 newcomp = new.instancemethod(completer,self.Completer,
883 883 self.Completer.__class__)
884 884 self.Completer.matchers.insert(pos,newcomp)
885 885
886 886 def set_completer(self):
887 887 """reset readline's completer to be our own."""
888 888 self.readline.set_completer(self.Completer.complete)
889 889
890 890 def _get_call_pdb(self):
891 891 return self._call_pdb
892 892
893 893 def _set_call_pdb(self,val):
894 894
895 895 if val not in (0,1,False,True):
896 896 raise ValueError,'new call_pdb value must be boolean'
897 897
898 898 # store value in instance
899 899 self._call_pdb = val
900 900
901 901 # notify the actual exception handlers
902 902 self.InteractiveTB.call_pdb = val
903 903 if self.isthreaded:
904 904 try:
905 905 self.sys_excepthook.call_pdb = val
906 906 except:
907 907 warn('Failed to activate pdb for threaded exception handler')
908 908
909 909 call_pdb = property(_get_call_pdb,_set_call_pdb,None,
910 910 'Control auto-activation of pdb at exceptions')
911 911
912 912
913 913 # These special functions get installed in the builtin namespace, to
914 914 # provide programmatic (pure python) access to magics, aliases and system
915 915 # calls. This is important for logging, user scripting, and more.
916 916
917 917 # We are basically exposing, via normal python functions, the three
918 918 # mechanisms in which ipython offers special call modes (magics for
919 919 # internal control, aliases for direct system access via pre-selected
920 920 # names, and !cmd for calling arbitrary system commands).
921 921
922 922 def ipmagic(self,arg_s):
923 923 """Call a magic function by name.
924 924
925 925 Input: a string containing the name of the magic function to call and any
926 926 additional arguments to be passed to the magic.
927 927
928 928 ipmagic('name -opt foo bar') is equivalent to typing at the ipython
929 929 prompt:
930 930
931 931 In[1]: %name -opt foo bar
932 932
933 933 To call a magic without arguments, simply use ipmagic('name').
934 934
935 935 This provides a proper Python function to call IPython's magics in any
936 936 valid Python code you can type at the interpreter, including loops and
937 937 compound statements. It is added by IPython to the Python builtin
938 938 namespace upon initialization."""
939 939
940 940 args = arg_s.split(' ',1)
941 941 magic_name = args[0]
942 942 magic_name = magic_name.lstrip(self.ESC_MAGIC)
943 943
944 944 try:
945 945 magic_args = args[1]
946 946 except IndexError:
947 947 magic_args = ''
948 948 fn = getattr(self,'magic_'+magic_name,None)
949 949 if fn is None:
950 950 error("Magic function `%s` not found." % magic_name)
951 951 else:
952 952 magic_args = self.var_expand(magic_args,1)
953 953 return fn(magic_args)
954 954
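    # Illustrative usage sketch: because ipmagic lives in the builtin
    # namespace, magics can be driven from ordinary Python control flow.
    # The %time calls below are examples only.
    #
    #   for i in range(3):
    #       ipmagic('time x = %d ** 2' % i)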
955 955 def ipalias(self,arg_s):
956 956 """Call an alias by name.
957 957
958 958 Input: a string containing the name of the alias to call and any
959 959         additional arguments to be passed to the alias.
960 960
961 961 ipalias('name -opt foo bar') is equivalent to typing at the ipython
962 962 prompt:
963 963
964 964 In[1]: name -opt foo bar
965 965
966 966 To call an alias without arguments, simply use ipalias('name').
967 967
968 968 This provides a proper Python function to call IPython's aliases in any
969 969 valid Python code you can type at the interpreter, including loops and
970 970 compound statements. It is added by IPython to the Python builtin
971 971 namespace upon initialization."""
972 972
973 973 args = arg_s.split(' ',1)
974 974 alias_name = args[0]
975 975 try:
976 976 alias_args = args[1]
977 977 except IndexError:
978 978 alias_args = ''
979 979 if alias_name in self.alias_table:
980 980 self.call_alias(alias_name,alias_args)
981 981 else:
982 982 error("Alias `%s` not found." % alias_name)
983 983
984 984 def ipsystem(self,arg_s):
985 985 """Make a system call, using IPython."""
986 986
987 987 self.system(arg_s)
988 988
989 989 def complete(self,text):
990 990 """Return a sorted list of all possible completions on text.
991 991
992 992 Inputs:
993 993
994 994 - text: a string of text to be completed on.
995 995
996 996 This is a wrapper around the completion mechanism, similar to what
997 997 readline does at the command line when the TAB key is hit. By
998 998 exposing it as a method, it can be used by other non-readline
999 999 environments (such as GUIs) for text completion.
1000 1000
1001 1001 Simple usage example:
1002 1002
1003 1003 In [7]: x = 'hello'
1004 1004
1005 1005 In [8]: x
1006 1006 Out[8]: 'hello'
1007 1007
1008 1008 In [9]: print x
1009 1009 hello
1010 1010
1011 1011 In [10]: _ip.IP.complete('x.l')
1012 1012 Out[10]: ['x.ljust', 'x.lower', 'x.lstrip']
1013 1013 """
1014 1014
1015 1015 complete = self.Completer.complete
1016 1016 state = 0
1017 1017         # use a dict so we get unique keys, since ipython's multiple
1018 1018 # completers can return duplicates. When we make 2.4 a requirement,
1019 1019 # start using sets instead, which are faster.
1020 1020 comps = {}
1021 1021 while True:
1022 1022 newcomp = complete(text,state,line_buffer=text)
1023 1023 if newcomp is None:
1024 1024 break
1025 1025 comps[newcomp] = 1
1026 1026 state += 1
1027 1027 outcomps = comps.keys()
1028 1028 outcomps.sort()
1029 1029 #print "T:",text,"OC:",outcomps # dbg
1030 1030 #print "vars:",self.user_ns.keys()
1031 1031 return outcomps
1032 1032
1033 1033 def set_completer_frame(self, frame=None):
1034 1034 if frame:
1035 1035 self.Completer.namespace = frame.f_locals
1036 1036 self.Completer.global_namespace = frame.f_globals
1037 1037 else:
1038 1038 self.Completer.namespace = self.user_ns
1039 1039 self.Completer.global_namespace = self.user_global_ns
1040 1040
1041 1041 def init_auto_alias(self):
1042 1042 """Define some aliases automatically.
1043 1043
1044 1044 These are ALL parameter-less aliases"""
1045 1045
1046 1046 for alias,cmd in self.auto_alias:
1047 1047 self.getapi().defalias(alias,cmd)
1048 1048
1049 1049
1050 1050 def alias_table_validate(self,verbose=0):
1051 1051 """Update information about the alias table.
1052 1052
1053 1053 In particular, make sure no Python keywords/builtins are in it."""
1054 1054
1055 1055 no_alias = self.no_alias
1056 1056 for k in self.alias_table.keys():
1057 1057 if k in no_alias:
1058 1058 del self.alias_table[k]
1059 1059 if verbose:
1060 1060 print ("Deleting alias <%s>, it's a Python "
1061 1061 "keyword or builtin." % k)
1062 1062
1063 1063 def set_autoindent(self,value=None):
1064 1064 """Set the autoindent flag, checking for readline support.
1065 1065
1066 1066 If called with no arguments, it acts as a toggle."""
1067 1067
1068 1068 if not self.has_readline:
1069 1069 if os.name == 'posix':
1070 1070 warn("The auto-indent feature requires the readline library")
1071 1071 self.autoindent = 0
1072 1072 return
1073 1073 if value is None:
1074 1074 self.autoindent = not self.autoindent
1075 1075 else:
1076 1076 self.autoindent = value
1077 1077
1078 1078 def rc_set_toggle(self,rc_field,value=None):
1079 1079 """Set or toggle a field in IPython's rc config. structure.
1080 1080
1081 1081 If called with no arguments, it acts as a toggle.
1082 1082
1083 1083 If called with a non-existent field, the resulting AttributeError
1084 1084 exception will propagate out."""
1085 1085
1086 1086 rc_val = getattr(self.rc,rc_field)
1087 1087 if value is None:
1088 1088 value = not rc_val
1089 1089 setattr(self.rc,rc_field,value)
1090 1090
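    # Illustrative usage sketch: 'automagic' is used below only as an example
    # of an existing rc attribute.
    #
    #   _ip.IP.rc_set_toggle('automagic')        # flip the current value
    #   _ip.IP.rc_set_toggle('automagic', True)  # set it explicitly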
1091 1091 def user_setup(self,ipythondir,rc_suffix,mode='install'):
1092 1092 """Install the user configuration directory.
1093 1093
1094 1094         Can be called when running for the first time or, via the mode
1095 1095         parameter, to upgrade the user's .ipython/ directory. Valid modes are
1096 1096         'install' and 'upgrade'."""
1097 1097
1098 1098 def wait():
1099 1099 try:
1100 1100 raw_input("Please press <RETURN> to start IPython.")
1101 1101 except EOFError:
1102 1102 print >> Term.cout
1103 1103 print '*'*70
1104 1104
1105 1105 cwd = os.getcwd() # remember where we started
1106 1106 glb = glob.glob
1107 1107 print '*'*70
1108 1108 if mode == 'install':
1109 1109 print \
1110 1110 """Welcome to IPython. I will try to create a personal configuration directory
1111 1111 where you can customize many aspects of IPython's functionality in:\n"""
1112 1112 else:
1113 1113 print 'I am going to upgrade your configuration in:'
1114 1114
1115 1115 print ipythondir
1116 1116
1117 1117 rcdirend = os.path.join('IPython','UserConfig')
1118 1118 cfg = lambda d: os.path.join(d,rcdirend)
1119 1119 try:
1120 1120 rcdir = filter(os.path.isdir,map(cfg,sys.path))[0]
1121 1121 print "Initializing from configuration",rcdir
1122 1122 except IndexError:
1123 1123 warning = """
1124 1124 Installation error. IPython's directory was not found.
1125 1125
1126 1126 Check the following:
1127 1127
1128 1128 The ipython/IPython directory should be in a directory belonging to your
1129 1129 PYTHONPATH environment variable (that is, it should be in a directory
1130 1130 belonging to sys.path). You can copy it explicitly there or just link to it.
1131 1131
1132 1132 IPython will create a minimal default configuration for you.
1133 1133
1134 1134 """
1135 1135 warn(warning)
1136 1136 wait()
1137 1137
1138 1138 if sys.platform =='win32':
1139 1139 inif = 'ipythonrc.ini'
1140 1140 else:
1141 1141 inif = 'ipythonrc'
1142 1142 minimal_setup = {'ipy_user_conf.py' : 'import ipy_defaults', inif : '# intentionally left blank' }
1143 1143 os.makedirs(ipythondir, mode = 0777)
1144 1144 for f, cont in minimal_setup.items():
1145 1145 open(ipythondir + '/' + f,'w').write(cont)
1146 1146
1147 1147 return
1148 1148
1149 1149 if mode == 'install':
1150 1150 try:
1151 1151 shutil.copytree(rcdir,ipythondir)
1152 1152 os.chdir(ipythondir)
1153 1153 rc_files = glb("ipythonrc*")
1154 1154 for rc_file in rc_files:
1155 1155 os.rename(rc_file,rc_file+rc_suffix)
1156 1156 except:
1157 1157 warning = """
1158 1158
1159 1159 There was a problem with the installation:
1160 1160 %s
1161 1161 Try to correct it or contact the developers if you think it's a bug.
1162 1162 IPython will proceed with builtin defaults.""" % sys.exc_info()[1]
1163 1163 warn(warning)
1164 1164 wait()
1165 1165 return
1166 1166
1167 1167 elif mode == 'upgrade':
1168 1168 try:
1169 1169 os.chdir(ipythondir)
1170 1170 except:
1171 1171 print """
1172 1172 Cannot upgrade: changing to directory %s failed. Details:
1173 1173 %s
1174 1174 """ % (ipythondir,sys.exc_info()[1])
1175 1175 wait()
1176 1176 return
1177 1177 else:
1178 1178 sources = glb(os.path.join(rcdir,'[A-Za-z]*'))
1179 1179 for new_full_path in sources:
1180 1180 new_filename = os.path.basename(new_full_path)
1181 1181 if new_filename.startswith('ipythonrc'):
1182 1182 new_filename = new_filename + rc_suffix
1183 1183 # The config directory should only contain files, skip any
1184 1184 # directories which may be there (like CVS)
1185 1185 if os.path.isdir(new_full_path):
1186 1186 continue
1187 1187 if os.path.exists(new_filename):
1188 1188 old_file = new_filename+'.old'
1189 1189 if os.path.exists(old_file):
1190 1190 os.remove(old_file)
1191 1191 os.rename(new_filename,old_file)
1192 1192 shutil.copy(new_full_path,new_filename)
1193 1193 else:
1194 1194 raise ValueError,'unrecognized mode for install:',`mode`
1195 1195
1196 1196 # Fix line-endings to those native to each platform in the config
1197 1197 # directory.
1198 1198 try:
1199 1199 os.chdir(ipythondir)
1200 1200 except:
1201 1201 print """
1202 1202 Problem: changing to directory %s failed.
1203 1203 Details:
1204 1204 %s
1205 1205
1206 1206 Some configuration files may have incorrect line endings. This should not
1207 1207 cause any problems during execution. """ % (ipythondir,sys.exc_info()[1])
1208 1208 wait()
1209 1209 else:
1210 1210 for fname in glb('ipythonrc*'):
1211 1211 try:
1212 1212 native_line_ends(fname,backup=0)
1213 1213 except IOError:
1214 1214 pass
1215 1215
1216 1216 if mode == 'install':
1217 1217 print """
1218 1218 Successful installation!
1219 1219
1220 1220 Please read the sections 'Initial Configuration' and 'Quick Tips' in the
1221 1221 IPython manual (there are both HTML and PDF versions supplied with the
1222 1222 distribution) to make sure that your system environment is properly configured
1223 1223 to take advantage of IPython's features.
1224 1224
1225 1225 Important note: the configuration system has changed! The old system is
1226 1226 still in place, but its settings may be partly overridden by the settings in
1227 1227 the "~/.ipython/ipy_user_conf.py" config file. Please take a look at that file
1228 1228 if some of the new settings bother you.
1229 1229
1230 1230 """
1231 1231 else:
1232 1232 print """
1233 1233 Successful upgrade!
1234 1234
1235 1235 All files in your directory:
1236 1236 %(ipythondir)s
1237 1237 which would have been overwritten by the upgrade were backed up with a .old
1238 1238 extension. If you had made particular customizations in those files you may
1239 1239 want to merge them back into the new files.""" % locals()
1240 1240 wait()
1241 1241 os.chdir(cwd)
1242 1242 # end user_setup()
1243 1243
1244 1244 def atexit_operations(self):
1245 1245 """This will be executed at the time of exit.
1246 1246
1247 1247 Saving of persistent data should be performed here. """
1248 1248
1249 1249 #print '*** IPython exit cleanup ***' # dbg
1250 1250 # input history
1251 1251 self.savehist()
1252 1252
1253 1253 # Cleanup all tempfiles left around
1254 1254 for tfile in self.tempfiles:
1255 1255 try:
1256 1256 os.unlink(tfile)
1257 1257 except OSError:
1258 1258 pass
1259 1259
1260 1260 self.hooks.shutdown_hook()
1261 1261
1262 1262 def savehist(self):
1263 1263 """Save input history to a file (via readline library)."""
1264 1264
1265 1265 if not self.has_readline:
1266 1266 return
1267 1267
1268 1268 try:
1269 1269 self.readline.write_history_file(self.histfile)
1270 1270 except:
1271 1271 print 'Unable to save IPython command history to file: ' + \
1272 1272 `self.histfile`
1273 1273
1274 1274 def reloadhist(self):
1275 1275 """Reload the input history from disk file."""
1276 1276
1277 1277 if self.has_readline:
1278 1278 try:
1279 1279 self.readline.clear_history()
1280 1280                 self.readline.read_history_file(self.histfile)
1281 1281 except AttributeError:
1282 1282 pass
1283 1283
1284 1284
1285 1285 def history_saving_wrapper(self, func):
1286 1286 """ Wrap func for readline history saving
1287 1287
1288 1288 Convert func into callable that saves & restores
1289 1289 history around the call """
1290 1290
1291 1291 if not self.has_readline:
1292 1292 return func
1293 1293
1294 1294 def wrapper():
1295 1295 self.savehist()
1296 1296 try:
1297 1297 func()
1298 1298 finally:
1299 1299                 self.readline.read_history_file(self.histfile)
1300 1300 return wrapper
1301 1301
1302 1302
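    # Illustrative usage sketch: wrapping a callable so the readline history
    # is saved before it runs and restored afterwards, as debugger() below
    # does with pdb's post-mortem entry point.  run_external_tool is a
    # hypothetical stand-in for code that clobbers readline history.
    #
    #   def run_external_tool():
    #       pass
    #
    #   _ip.IP.history_saving_wrapper(run_external_tool)()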
1303 1303 def pre_readline(self):
1304 1304 """readline hook to be used at the start of each line.
1305 1305
1306 1306 Currently it handles auto-indent only."""
1307 1307
1308 1308 #debugx('self.indent_current_nsp','pre_readline:')
1309 1309
1310 1310 if self.rl_do_indent:
1311 1311 self.readline.insert_text(self.indent_current_str())
1312 1312 if self.rl_next_input is not None:
1313 1313 self.readline.insert_text(self.rl_next_input)
1314 1314 self.rl_next_input = None
1315 1315
1316 1316 def init_readline(self):
1317 1317 """Command history completion/saving/reloading."""
1318 1318
1319 1319
1320 1320 import IPython.rlineimpl as readline
1321 1321
1322 1322 if not readline.have_readline:
1323 1323 self.has_readline = 0
1324 1324 self.readline = None
1325 1325 # no point in bugging windows users with this every time:
1326 1326 warn('Readline services not available on this platform.')
1327 1327 else:
1328 1328 sys.modules['readline'] = readline
1329 1329 import atexit
1330 1330 from IPython.completer import IPCompleter
1331 1331 self.Completer = IPCompleter(self,
1332 1332 self.user_ns,
1333 1333 self.user_global_ns,
1334 1334 self.rc.readline_omit__names,
1335 1335 self.alias_table)
1336 1336 sdisp = self.strdispatchers.get('complete_command', StrDispatch())
1337 1337 self.strdispatchers['complete_command'] = sdisp
1338 1338 self.Completer.custom_completers = sdisp
1339 1339 # Platform-specific configuration
1340 1340 if os.name == 'nt':
1341 1341 self.readline_startup_hook = readline.set_pre_input_hook
1342 1342 else:
1343 1343 self.readline_startup_hook = readline.set_startup_hook
1344 1344
1345 1345 # Load user's initrc file (readline config)
1346 1346 # Or if libedit is used, load editrc.
1347 1347 inputrc_name = os.environ.get('INPUTRC')
1348 1348 if inputrc_name is None:
1349 1349 home_dir = get_home_dir()
1350 1350 if home_dir is not None:
1351 1351 inputrc_name = '.inputrc'
1352 1352 if readline.uses_libedit:
1353 1353 inputrc_name = '.editrc'
1354 1354 inputrc_name = os.path.join(home_dir, inputrc_name)
1355 1355 if os.path.isfile(inputrc_name):
1356 1356 try:
1357 1357 readline.read_init_file(inputrc_name)
1358 1358 except:
1359 1359 warn('Problems reading readline initialization file <%s>'
1360 1360 % inputrc_name)
1361 1361
1362 1362 self.has_readline = 1
1363 1363 self.readline = readline
1364 1364 # save this in sys so embedded copies can restore it properly
1365 1365 sys.ipcompleter = self.Completer.complete
1366 1366 self.set_completer()
1367 1367
1368 1368 # Configure readline according to user's prefs
1369 1369 # This is only done if GNU readline is being used. If libedit
1370 1370 # is being used (as on Leopard) the readline config is
1371 1371 # not run as the syntax for libedit is different.
1372 1372 if not readline.uses_libedit:
1373 1373 for rlcommand in self.rc.readline_parse_and_bind:
1374 1374 readline.parse_and_bind(rlcommand)
1375 1375
1376 1376 # remove some chars from the delimiters list
1377 1377 delims = readline.get_completer_delims()
1378 1378 delims = delims.translate(string._idmap,
1379 1379 self.rc.readline_remove_delims)
1380 1380 readline.set_completer_delims(delims)
1381 1381 # otherwise we end up with a monster history after a while:
1382 1382 readline.set_history_length(1000)
1383 1383 try:
1384 1384 #print '*** Reading readline history' # dbg
1385 1385 readline.read_history_file(self.histfile)
1386 1386 except IOError:
1387 1387 pass # It doesn't exist yet.
1388 1388
1389 1389 atexit.register(self.atexit_operations)
1390 1390 del atexit
1391 1391
1392 1392 # Configure auto-indent for all platforms
1393 1393 self.set_autoindent(self.rc.autoindent)
1394 1394
1395 1395 def ask_yes_no(self,prompt,default=True):
1396 1396 if self.rc.quiet:
1397 1397 return True
1398 1398 return ask_yes_no(prompt,default)
1399 1399
1400 1400 def _should_recompile(self,e):
1401 1401 """Utility routine for edit_syntax_error"""
1402 1402
1403 1403 if e.filename in ('<ipython console>','<input>','<string>',
1404 1404 '<console>','<BackgroundJob compilation>',
1405 1405 None):
1406 1406
1407 1407 return False
1408 1408 try:
1409 1409 if (self.rc.autoedit_syntax and
1410 1410 not self.ask_yes_no('Return to editor to correct syntax error? '
1411 1411 '[Y/n] ','y')):
1412 1412 return False
1413 1413 except EOFError:
1414 1414 return False
1415 1415
1416 1416 def int0(x):
1417 1417 try:
1418 1418 return int(x)
1419 1419 except TypeError:
1420 1420 return 0
1421 1421 # always pass integer line and offset values to editor hook
1422 self.hooks.fix_error_editor(e.filename,
1423 int0(e.lineno),int0(e.offset),e.msg)
1422 try:
1423 self.hooks.fix_error_editor(e.filename,
1424 int0(e.lineno),int0(e.offset),e.msg)
1425 except IPython.ipapi.TryNext:
1426 warn('Could not open editor')
1427 return False
1424 1428 return True
1425 1429
1426 1430 def edit_syntax_error(self):
1427 1431 """The bottom half of the syntax error handler called in the main loop.
1428 1432
1429 1433 Loop until syntax error is fixed or user cancels.
1430 1434 """
1431 1435
1432 1436 while self.SyntaxTB.last_syntax_error:
1433 1437 # copy and clear last_syntax_error
1434 1438 err = self.SyntaxTB.clear_err_state()
1435 1439 if not self._should_recompile(err):
1436 1440 return
1437 1441 try:
1438 1442 # may set last_syntax_error again if a SyntaxError is raised
1439 1443 self.safe_execfile(err.filename,self.user_ns)
1440 1444 except:
1441 1445 self.showtraceback()
1442 1446 else:
1443 1447 try:
1444 1448 f = file(err.filename)
1445 1449 try:
1446 1450 sys.displayhook(f.read())
1447 1451 finally:
1448 1452 f.close()
1449 1453 except:
1450 1454 self.showtraceback()
1451 1455
1452 1456 def showsyntaxerror(self, filename=None):
1453 1457 """Display the syntax error that just occurred.
1454 1458
1455 1459 This doesn't display a stack trace because there isn't one.
1456 1460
1457 1461 If a filename is given, it is stuffed in the exception instead
1458 1462 of what was there before (because Python's parser always uses
1459 1463 "<string>" when reading from a string).
1460 1464 """
1461 1465 etype, value, last_traceback = sys.exc_info()
1462 1466
1463 1467 # See note about these variables in showtraceback() below
1464 1468 sys.last_type = etype
1465 1469 sys.last_value = value
1466 1470 sys.last_traceback = last_traceback
1467 1471
1468 1472 if filename and etype is SyntaxError:
1469 1473 # Work hard to stuff the correct filename in the exception
1470 1474 try:
1471 1475 msg, (dummy_filename, lineno, offset, line) = value
1472 1476 except:
1473 1477 # Not the format we expect; leave it alone
1474 1478 pass
1475 1479 else:
1476 1480 # Stuff in the right filename
1477 1481 try:
1478 1482 # Assume SyntaxError is a class exception
1479 1483 value = SyntaxError(msg, (filename, lineno, offset, line))
1480 1484 except:
1481 1485 # If that failed, assume SyntaxError is a string
1482 1486 value = msg, (filename, lineno, offset, line)
1483 1487 self.SyntaxTB(etype,value,[])
1484 1488
1485 1489 def debugger(self,force=False):
1486 1490 """Call the pydb/pdb debugger.
1487 1491
1488 1492 Keywords:
1489 1493
1490 1494 - force(False): by default, this routine checks the instance call_pdb
1491 1495 flag and does not actually invoke the debugger if the flag is false.
1492 1496 The 'force' option forces the debugger to activate even if the flag
1493 1497 is false.
1494 1498 """
1495 1499
1496 1500 if not (force or self.call_pdb):
1497 1501 return
1498 1502
1499 1503 if not hasattr(sys,'last_traceback'):
1500 1504 error('No traceback has been produced, nothing to debug.')
1501 1505 return
1502 1506
1503 1507 # use pydb if available
1504 1508 if Debugger.has_pydb:
1505 1509 from pydb import pm
1506 1510 else:
1507 1511 # fallback to our internal debugger
1508 1512 pm = lambda : self.InteractiveTB.debugger(force=True)
1509 1513 self.history_saving_wrapper(pm)()
1510 1514
1511 1515 def showtraceback(self,exc_tuple = None,filename=None,tb_offset=None):
1512 1516 """Display the exception that just occurred.
1513 1517
1514 1518 If nothing is known about the exception, this is the method which
1515 1519 should be used throughout the code for presenting user tracebacks,
1516 1520 rather than directly invoking the InteractiveTB object.
1517 1521
1518 1522 A specific showsyntaxerror() also exists, but this method can take
1519 1523 care of calling it if needed, so unless you are explicitly catching a
1520 1524 SyntaxError exception, don't try to analyze the stack manually and
1521 1525 simply call this method."""
1522 1526
1523 1527
1524 1528 # Though this won't be called by syntax errors in the input line,
1525 1529         # there may be SyntaxError cases with imported code.
1526 1530
1527 1531 try:
1528 1532 if exc_tuple is None:
1529 1533 etype, value, tb = sys.exc_info()
1530 1534 else:
1531 1535 etype, value, tb = exc_tuple
1532 1536
1533 1537 if etype is SyntaxError:
1534 1538 self.showsyntaxerror(filename)
1535 1539 elif etype is IPython.ipapi.UsageError:
1536 1540 print "UsageError:", value
1537 1541 else:
1538 1542 # WARNING: these variables are somewhat deprecated and not
1539 1543 # necessarily safe to use in a threaded environment, but tools
1540 1544 # like pdb depend on their existence, so let's set them. If we
1541 1545 # find problems in the field, we'll need to revisit their use.
1542 1546 sys.last_type = etype
1543 1547 sys.last_value = value
1544 1548 sys.last_traceback = tb
1545 1549
1546 1550 if etype in self.custom_exceptions:
1547 1551 self.CustomTB(etype,value,tb)
1548 1552 else:
1549 1553 self.InteractiveTB(etype,value,tb,tb_offset=tb_offset)
1550 1554 if self.InteractiveTB.call_pdb and self.has_readline:
1551 1555 # pdb mucks up readline, fix it back
1552 1556 self.set_completer()
1553 1557 except KeyboardInterrupt:
1554 1558 self.write("\nKeyboardInterrupt\n")
1555 1559
1556 1560
1557 1561
1558 1562 def mainloop(self,banner=None):
1559 1563 """Creates the local namespace and starts the mainloop.
1560 1564
1561 1565 If an optional banner argument is given, it will override the
1562 1566 internally created default banner."""
1563 1567
1564 1568 if self.rc.c: # Emulate Python's -c option
1565 1569 self.exec_init_cmd()
1566 1570 if banner is None:
1567 1571 if not self.rc.banner:
1568 1572 banner = ''
1569 1573 # banner is string? Use it directly!
1570 1574 elif isinstance(self.rc.banner,basestring):
1571 1575 banner = self.rc.banner
1572 1576 else:
1573 1577 banner = self.BANNER+self.banner2
1574 1578
1579 # if you run stuff with -c <cmd>, raw hist is not updated
1580 # ensure that it's in sync
1581 if len(self.input_hist) != len (self.input_hist_raw):
1582 self.input_hist_raw = InputList(self.input_hist)
1583
1575 1584 while 1:
1576 1585 try:
1577 1586 self.interact(banner)
1578 1587 #self.interact_with_readline()
1579 1588 # XXX for testing of a readline-decoupled repl loop, call interact_with_readline above
1580 1589
1581 1590 break
1582 1591 except KeyboardInterrupt:
1583 1592 # this should not be necessary, but KeyboardInterrupt
1584 1593 # handling seems rather unpredictable...
1585 1594 self.write("\nKeyboardInterrupt in interact()\n")
1586 1595
1587 1596 def exec_init_cmd(self):
1588 1597 """Execute a command given at the command line.
1589 1598
1590 1599 This emulates Python's -c option."""
1591 1600
1592 1601 #sys.argv = ['-c']
1593 1602 self.push(self.prefilter(self.rc.c, False))
1594 1603 if not self.rc.interact:
1595 1604 self.ask_exit()
1596 1605
1597 1606 def embed_mainloop(self,header='',local_ns=None,global_ns=None,stack_depth=0):
1598 1607 """Embeds IPython into a running python program.
1599 1608
1600 1609 Input:
1601 1610
1602 1611 - header: An optional header message can be specified.
1603 1612
1604 1613 - local_ns, global_ns: working namespaces. If given as None, the
1605 1614 IPython-initialized one is updated with __main__.__dict__, so that
1606 1615 program variables become visible but user-specific configuration
1607 1616 remains possible.
1608 1617
1609 1618 - stack_depth: specifies how many levels in the stack to go to
1610 1619 looking for namespaces (when local_ns and global_ns are None). This
1611 1620 allows an intermediate caller to make sure that this function gets
1612 1621 the namespace from the intended level in the stack. By default (0)
1613 1622 it will get its locals and globals from the immediate caller.
1614 1623
1615 1624 Warning: it's possible to use this in a program which is being run by
1616 1625 IPython itself (via %run), but some funny things will happen (a few
1617 1626 globals get overwritten). In the future this will be cleaned up, as
1618 1627 there is no fundamental reason why it can't work perfectly."""
1619 1628
1620 1629 # Get locals and globals from caller
1621 1630 if local_ns is None or global_ns is None:
1622 1631 call_frame = sys._getframe(stack_depth).f_back
1623 1632
1624 1633 if local_ns is None:
1625 1634 local_ns = call_frame.f_locals
1626 1635 if global_ns is None:
1627 1636 global_ns = call_frame.f_globals
1628 1637
1629 1638 # Update namespaces and fire up interpreter
1630 1639
1631 1640 # The global one is easy, we can just throw it in
1632 1641 self.user_global_ns = global_ns
1633 1642
1634 1643 # but the user/local one is tricky: ipython needs it to store internal
1635 1644 # data, but we also need the locals. We'll copy locals in the user
1636 1645 # one, but will track what got copied so we can delete them at exit.
1637 1646 # This is so that a later embedded call doesn't see locals from a
1638 1647 # previous call (which most likely existed in a separate scope).
1639 1648 local_varnames = local_ns.keys()
1640 1649 self.user_ns.update(local_ns)
1641 1650 #self.user_ns['local_ns'] = local_ns # dbg
1642 1651
1643 1652 # Patch for global embedding to make sure that things don't overwrite
1644 1653 # user globals accidentally. Thanks to Richard <rxe@renre-europe.com>
1645 1654 # FIXME. Test this a bit more carefully (the if.. is new)
1646 1655 if local_ns is None and global_ns is None:
1647 1656 self.user_global_ns.update(__main__.__dict__)
1648 1657
1649 1658 # make sure the tab-completer has the correct frame information, so it
1650 1659 # actually completes using the frame's locals/globals
1651 1660 self.set_completer_frame()
1652 1661
1653 1662 # before activating the interactive mode, we need to make sure that
1654 1663 # all names in the builtin namespace needed by ipython point to
1655 1664 # ourselves, and not to other instances.
1656 1665 self.add_builtins()
1657 1666
1658 1667 self.interact(header)
1659 1668
1660 1669 # now, purge out the user namespace from anything we might have added
1661 1670 # from the caller's local namespace
1662 1671 delvar = self.user_ns.pop
1663 1672 for var in local_varnames:
1664 1673 delvar(var,None)
1665 1674 # and clean builtins we may have overridden
1666 1675 self.clean_builtins()
1667 1676
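    # Illustrative usage sketch: embed_mainloop is normally reached through
    # the IPShellEmbed wrapper in IPython.Shell rather than called directly;
    # the snippet below shows that typical path (the banner text is an
    # example only).
    #
    #   from IPython.Shell import IPShellEmbed
    #   ipshell = IPShellEmbed(banner='dropping into IPython')
    #
    #   def some_function():
    #       x = 42
    #       ipshell('inspect x here; exit the shell to resume the program')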
1668 1677 def interact_prompt(self):
1669 1678 """ Print the prompt (in read-eval-print loop)
1670 1679
1671 1680         Provided for those who want to implement their own read-eval-print
1672 1681         loop (e.g. GUIs); it is not used in the standard IPython flow.
1673 1682 """
1674 1683 if self.more:
1675 1684 try:
1676 1685 prompt = self.hooks.generate_prompt(True)
1677 1686 except:
1678 1687 self.showtraceback()
1679 1688 if self.autoindent:
1680 1689 self.rl_do_indent = True
1681 1690
1682 1691 else:
1683 1692 try:
1684 1693 prompt = self.hooks.generate_prompt(False)
1685 1694 except:
1686 1695 self.showtraceback()
1687 1696 self.write(prompt)
1688 1697
1689 1698 def interact_handle_input(self,line):
1690 1699 """ Handle the input line (in read-eval-print loop)
1691 1700
1692 1701         Provided for those who want to implement their own read-eval-print
1693 1702         loop (e.g. GUIs); it is not used in the standard IPython flow.
1694 1703 """
1695 1704 if line.lstrip() == line:
1696 1705 self.shadowhist.add(line.strip())
1697 1706 lineout = self.prefilter(line,self.more)
1698 1707
1699 1708 if line.strip():
1700 1709 if self.more:
1701 1710 self.input_hist_raw[-1] += '%s\n' % line
1702 1711 else:
1703 1712 self.input_hist_raw.append('%s\n' % line)
1704 1713
1705 1714
1706 1715 self.more = self.push(lineout)
1707 1716 if (self.SyntaxTB.last_syntax_error and
1708 1717 self.rc.autoedit_syntax):
1709 1718 self.edit_syntax_error()
1710 1719
1711 1720 def interact_with_readline(self):
1712 1721 """ Demo of using interact_handle_input, interact_prompt
1713 1722
1714 1723 This is the main read-eval-print loop. If you need to implement your own (e.g. for GUI),
1715 1724 it should work like this.
1716 1725 """
1717 1726 self.readline_startup_hook(self.pre_readline)
1718 1727 while not self.exit_now:
1719 1728 self.interact_prompt()
1720 1729 if self.more:
1721 1730 self.rl_do_indent = True
1722 1731 else:
1723 1732 self.rl_do_indent = False
1724 1733 line = raw_input_original().decode(self.stdin_encoding)
1725 1734 self.interact_handle_input(line)
1726 1735
1727 1736
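    # Illustrative sketch: a frontend that cannot use readline can drive the
    # same machinery by pairing interact_prompt() with interact_handle_input()
    # from its own event loop.  get_line_from_gui() is a hypothetical
    # placeholder for the frontend's input source.
    #
    #   shell = _ip.IP
    #   while not shell.exit_now:
    #       shell.interact_prompt()
    #       line = get_line_from_gui()
    #       shell.interact_handle_input(line)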
1728 1737 def interact(self, banner=None):
1729 1738 """Closely emulate the interactive Python console.
1730 1739
1731 1740         The optional banner argument specifies the banner to print
1732 1741 before the first interaction; by default it prints a banner
1733 1742 similar to the one printed by the real Python interpreter,
1734 1743 followed by the current class name in parentheses (so as not
1735 1744 to confuse this with the real interpreter -- since it's so
1736 1745 close!).
1737 1746
1738 1747 """
1739 1748
1740 1749 if self.exit_now:
1741 1750 # batch run -> do not interact
1742 1751 return
1743 1752 cprt = 'Type "copyright", "credits" or "license" for more information.'
1744 1753 if banner is None:
1745 1754 self.write("Python %s on %s\n%s\n(%s)\n" %
1746 1755 (sys.version, sys.platform, cprt,
1747 1756 self.__class__.__name__))
1748 1757 else:
1749 1758 self.write(banner)
1750 1759
1751 1760 more = 0
1752 1761
1753 1762 # Mark activity in the builtins
1754 1763 __builtin__.__dict__['__IPYTHON__active'] += 1
1755 1764
1756 1765 if self.has_readline:
1757 1766 self.readline_startup_hook(self.pre_readline)
1758 1767 # exit_now is set by a call to %Exit or %Quit, through the
1759 1768 # ask_exit callback.
1760 1769
1761 1770 while not self.exit_now:
1762 1771 self.hooks.pre_prompt_hook()
1763 1772 if more:
1764 1773 try:
1765 1774 prompt = self.hooks.generate_prompt(True)
1766 1775 except:
1767 1776 self.showtraceback()
1768 1777 if self.autoindent:
1769 1778 self.rl_do_indent = True
1770 1779
1771 1780 else:
1772 1781 try:
1773 1782 prompt = self.hooks.generate_prompt(False)
1774 1783 except:
1775 1784 self.showtraceback()
1776 1785 try:
1777 1786 line = self.raw_input(prompt,more)
1778 1787 if self.exit_now:
1779 1788 # quick exit on sys.std[in|out] close
1780 1789 break
1781 1790 if self.autoindent:
1782 1791 self.rl_do_indent = False
1783 1792
1784 1793 except KeyboardInterrupt:
1785 1794 #double-guard against keyboardinterrupts during kbdint handling
1786 1795 try:
1787 1796 self.write('\nKeyboardInterrupt\n')
1788 1797 self.resetbuffer()
1789 1798 # keep cache in sync with the prompt counter:
1790 1799 self.outputcache.prompt_count -= 1
1791 1800
1792 1801 if self.autoindent:
1793 1802 self.indent_current_nsp = 0
1794 1803 more = 0
1795 1804 except KeyboardInterrupt:
1796 1805 pass
1797 1806 except EOFError:
1798 1807 if self.autoindent:
1799 1808 self.rl_do_indent = False
1800 1809 self.readline_startup_hook(None)
1801 1810 self.write('\n')
1802 1811 self.exit()
1803 1812 except bdb.BdbQuit:
1804 1813 warn('The Python debugger has exited with a BdbQuit exception.\n'
1805 1814 'Because of how pdb handles the stack, it is impossible\n'
1806 1815 'for IPython to properly format this particular exception.\n'
1807 1816 'IPython will resume normal operation.')
1808 1817 except:
1809 1818 # exceptions here are VERY RARE, but they can be triggered
1810 1819 # asynchronously by signal handlers, for example.
1811 1820 self.showtraceback()
1812 1821 else:
1813 1822 more = self.push(line)
1814 1823 if (self.SyntaxTB.last_syntax_error and
1815 1824 self.rc.autoedit_syntax):
1816 1825 self.edit_syntax_error()
1817 1826
1818 1827 # We are off again...
1819 1828 __builtin__.__dict__['__IPYTHON__active'] -= 1
1820 1829
1821 1830 def excepthook(self, etype, value, tb):
1822 1831 """One more defense for GUI apps that call sys.excepthook.
1823 1832
1824 1833 GUI frameworks like wxPython trap exceptions and call
1825 1834 sys.excepthook themselves. I guess this is a feature that
1826 1835 enables them to keep running after exceptions that would
1827 1836 otherwise kill their mainloop. This is a bother for IPython
1828 1837         which expects to catch all of the program exceptions with a try:
1829 1838 except: statement.
1830 1839
1831 1840 Normally, IPython sets sys.excepthook to a CrashHandler instance, so if
1832 1841 any app directly invokes sys.excepthook, it will look to the user like
1833 1842 IPython crashed. In order to work around this, we can disable the
1834 1843 CrashHandler and replace it with this excepthook instead, which prints a
1835 1844 regular traceback using our InteractiveTB. In this fashion, apps which
1836 1845 call sys.excepthook will generate a regular-looking exception from
1837 1846 IPython, and the CrashHandler will only be triggered by real IPython
1838 1847 crashes.
1839 1848
1840 1849 This hook should be used sparingly, only in places which are not likely
1841 1850 to be true IPython errors.
1842 1851 """
1843 1852 self.showtraceback((etype,value,tb),tb_offset=0)
1844 1853
1845 1854 def expand_aliases(self,fn,rest):
1846 1855 """ Expand multiple levels of aliases:
1847 1856
1848 1857 if:
1849 1858
1850 1859 alias foo bar /tmp
1851 1860 alias baz foo
1852 1861
1853 1862 then:
1854 1863
1855 1864 baz huhhahhei -> bar /tmp huhhahhei
1856 1865
1857 1866 """
1858 1867 line = fn + " " + rest
1859 1868
1860 1869 done = Set()
1861 1870 while 1:
1862 1871 pre,fn,rest = prefilter.splitUserInput(line,
1863 1872 prefilter.shell_line_split)
1864 1873 if fn in self.alias_table:
1865 1874 if fn in done:
1866 1875 warn("Cyclic alias definition, repeated '%s'" % fn)
1867 1876 return ""
1868 1877 done.add(fn)
1869 1878
1870 1879 l2 = self.transform_alias(fn,rest)
1871 1880 # dir -> dir
1872 1881 # print "alias",line, "->",l2 #dbg
1873 1882 if l2 == line:
1874 1883 break
1875 1884 # ls -> ls -F should not recurse forever
1876 1885 if l2.split(None,1)[0] == line.split(None,1)[0]:
1877 1886 line = l2
1878 1887 break
1879 1888
1880 1889 line=l2
1881 1890
1882 1891
1883 1892 # print "al expand to",line #dbg
1884 1893 else:
1885 1894 break
1886 1895
1887 1896 return line
1888 1897
1889 1898 def transform_alias(self, alias,rest=''):
1890 1899 """ Transform alias to system command string.
1891 1900 """
1892 1901 trg = self.alias_table[alias]
1893 1902
1894 1903 nargs,cmd = trg
1895 1904 # print trg #dbg
1896 1905 if ' ' in cmd and os.path.isfile(cmd):
1897 1906 cmd = '"%s"' % cmd
1898 1907
1899 1908 # Expand the %l special to be the user's input line
1900 1909 if cmd.find('%l') >= 0:
1901 1910 cmd = cmd.replace('%l',rest)
1902 1911 rest = ''
1903 1912 if nargs==0:
1904 1913 # Simple, argument-less aliases
1905 1914 cmd = '%s %s' % (cmd,rest)
1906 1915 else:
1907 1916 # Handle aliases with positional arguments
1908 1917 args = rest.split(None,nargs)
1909 1918 if len(args)< nargs:
1910 1919 error('Alias <%s> requires %s arguments, %s given.' %
1911 1920 (alias,nargs,len(args)))
1912 1921 return None
1913 1922 cmd = '%s %s' % (cmd % tuple(args[:nargs]),' '.join(args[nargs:]))
1914 1923 # Now call the macro, evaluating in the user's namespace
1915 1924 #print 'new command: <%r>' % cmd # dbg
1916 1925 return cmd
1917 1926
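    # Illustrative sketch of what transform_alias produces for an alias with
    # positional arguments; the alias definition itself is an example only.
    #
    #   _ip.defalias('parts', 'echo first %s second %s')
    #   _ip.IP.transform_alias('parts', 'a b c d')
    #   # expected result, per the nargs handling above:
    #   #   'echo first a second b c d'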
1918 1927 def call_alias(self,alias,rest=''):
1919 1928 """Call an alias given its name and the rest of the line.
1920 1929
1921 1930 This is only used to provide backwards compatibility for users of
1922 1931         ipalias(), use of which is no longer recommended."""
1923 1932
1924 1933 # Now call the macro, evaluating in the user's namespace
1925 1934 cmd = self.transform_alias(alias, rest)
1926 1935 try:
1927 1936 self.system(cmd)
1928 1937 except:
1929 1938 self.showtraceback()
1930 1939
1931 1940 def indent_current_str(self):
1932 1941 """return the current level of indentation as a string"""
1933 1942 return self.indent_current_nsp * ' '
1934 1943
1935 1944 def autoindent_update(self,line):
1936 1945 """Keep track of the indent level."""
1937 1946
1938 1947 #debugx('line')
1939 1948 #debugx('self.indent_current_nsp')
1940 1949 if self.autoindent:
1941 1950 if line:
1942 1951 inisp = num_ini_spaces(line)
1943 1952 if inisp < self.indent_current_nsp:
1944 1953 self.indent_current_nsp = inisp
1945 1954
1946 1955 if line[-1] == ':':
1947 1956 self.indent_current_nsp += 4
1948 1957 elif dedent_re.match(line):
1949 1958 self.indent_current_nsp -= 4
1950 1959 else:
1951 1960 self.indent_current_nsp = 0
1952 1961
1953 1962 def runlines(self,lines):
1954 1963 """Run a string of one or more lines of source.
1955 1964
1956 1965 This method is capable of running a string containing multiple source
1957 1966 lines, as if they had been entered at the IPython prompt. Since it
1958 1967 exposes IPython's processing machinery, the given strings can contain
1959 1968 magic calls (%magic), special shell access (!cmd), etc."""
1960 1969
1961 1970 # We must start with a clean buffer, in case this is run from an
1962 1971 # interactive IPython session (via a magic, for example).
1963 1972 self.resetbuffer()
1964 1973 lines = lines.split('\n')
1965 1974 more = 0
1966 1975
1967 1976 for line in lines:
1968 1977 # skip blank lines so we don't mess up the prompt counter, but do
1969 1978 # NOT skip even a blank line if we are in a code block (more is
1970 1979 # true)
1971 1980
1972 1981
1973 1982 if line or more:
1974 1983 # push to raw history, so hist line numbers stay in sync
1975 1984 self.input_hist_raw.append("# " + line + "\n")
1976 1985 more = self.push(self.prefilter(line,more))
1977 1986 # IPython's runsource returns None if there was an error
1978 1987 # compiling the code. This allows us to stop processing right
1979 1988 # away, so the user gets the error message at the right place.
1980 1989 if more is None:
1981 1990 break
1982 1991 else:
1983 1992 self.input_hist_raw.append("\n")
1984 1993 # final newline in case the input didn't have it, so that the code
1985 1994 # actually does get executed
1986 1995 if more:
1987 1996 self.push('\n')
1988 1997
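    # Illustrative usage sketch: runlines() accepts a block of source exactly
    # as it would be typed at the prompt, including magic calls.  The block
    # below is an example only.
    #
    #   _ip.IP.runlines("""
    #   a = 10
    #   print a * 2
    #   %who_ls
    #   """)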
1989 1998 def runsource(self, source, filename='<input>', symbol='single'):
1990 1999 """Compile and run some source in the interpreter.
1991 2000
1992 2001 Arguments are as for compile_command().
1993 2002
1994 2003         One of several things can happen:
1995 2004
1996 2005 1) The input is incorrect; compile_command() raised an
1997 2006 exception (SyntaxError or OverflowError). A syntax traceback
1998 2007 will be printed by calling the showsyntaxerror() method.
1999 2008
2000 2009 2) The input is incomplete, and more input is required;
2001 2010 compile_command() returned None. Nothing happens.
2002 2011
2003 2012 3) The input is complete; compile_command() returned a code
2004 2013 object. The code is executed by calling self.runcode() (which
2005 2014 also handles run-time exceptions, except for SystemExit).
2006 2015
2007 2016 The return value is:
2008 2017
2009 2018 - True in case 2
2010 2019
2011 2020         - False in the other cases, unless an exception is raised, in which case
2012 2021 None is returned instead. This can be used by external callers to
2013 2022 know whether to continue feeding input or not.
2014 2023
2015 2024 The return value can be used to decide whether to use sys.ps1 or
2016 2025 sys.ps2 to prompt the next line."""
2017 2026
2018 2027 # if the source code has leading blanks, add 'if 1:\n' to it
2019 2028 # this allows execution of indented pasted code. It is tempting
2020 2029 # to add '\n' at the end of source to run commands like ' a=1'
2021 2030 # directly, but this fails for more complicated scenarios
2022 2031 source=source.encode(self.stdin_encoding)
2023 2032 if source[:1] in [' ', '\t']:
2024 2033 source = 'if 1:\n%s' % source
2025 2034
2026 2035 try:
2027 2036 code = self.compile(source,filename,symbol)
2028 2037 except (OverflowError, SyntaxError, ValueError, TypeError):
2029 2038 # Case 1
2030 2039 self.showsyntaxerror(filename)
2031 2040 return None
2032 2041
2033 2042 if code is None:
2034 2043 # Case 2
2035 2044 return True
2036 2045
2037 2046 # Case 3
2038 2047 # We store the code object so that threaded shells and
2039 2048 # custom exception handlers can access all this info if needed.
2040 2049 # The source corresponding to this can be obtained from the
2041 2050 # buffer attribute as '\n'.join(self.buffer).
2042 2051 self.code_to_run = code
2043 2052 # now actually execute the code object
2044 2053 if self.runcode(code) == 0:
2045 2054 return False
2046 2055 else:
2047 2056 return None
2048 2057
2049 2058 def runcode(self,code_obj):
2050 2059 """Execute a code object.
2051 2060
2052 2061 When an exception occurs, self.showtraceback() is called to display a
2053 2062 traceback.
2054 2063
2055 2064 Return value: a flag indicating whether the code to be run completed
2056 2065 successfully:
2057 2066
2058 2067 - 0: successful execution.
2059 2068 - 1: an error occurred.
2060 2069 """
2061 2070
2062 2071 # Set our own excepthook in case the user code tries to call it
2063 2072 # directly, so that the IPython crash handler doesn't get triggered
2064 2073 old_excepthook,sys.excepthook = sys.excepthook, self.excepthook
2065 2074
2066 2075 # we save the original sys.excepthook in the instance, in case config
2067 2076 # code (such as magics) needs access to it.
2068 2077 self.sys_excepthook = old_excepthook
2069 2078 outflag = 1 # happens in more places, so it's easier as default
2070 2079 try:
2071 2080 try:
2072 2081 self.hooks.pre_runcode_hook()
2073 2082 exec code_obj in self.user_global_ns, self.user_ns
2074 2083 finally:
2075 2084 # Reset our crash handler in place
2076 2085 sys.excepthook = old_excepthook
2077 2086 except SystemExit:
2078 2087 self.resetbuffer()
2079 2088 self.showtraceback()
2080 2089 warn("Type %exit or %quit to exit IPython "
2081 2090 "(%Exit or %Quit do so unconditionally).",level=1)
2082 2091 except self.custom_exceptions:
2083 2092 etype,value,tb = sys.exc_info()
2084 2093 self.CustomTB(etype,value,tb)
2085 2094 except:
2086 2095 self.showtraceback()
2087 2096 else:
2088 2097 outflag = 0
2089 2098 if softspace(sys.stdout, 0):
2090 2099 print
2091 2100 # Flush out code object which has been run (and source)
2092 2101 self.code_to_run = None
2093 2102 return outflag
2094 2103
2095 2104 def push(self, line):
2096 2105 """Push a line to the interpreter.
2097 2106
2098 2107 The line should not have a trailing newline; it may have
2099 2108 internal newlines. The line is appended to a buffer and the
2100 2109 interpreter's runsource() method is called with the
2101 2110 concatenated contents of the buffer as source. If this
2102 2111 indicates that the command was executed or invalid, the buffer
2103 2112 is reset; otherwise, the command is incomplete, and the buffer
2104 2113 is left as it was after the line was appended. The return
2105 2114 value is 1 if more input is required, 0 if the line was dealt
2106 2115 with in some way (this is the same as runsource()).
2107 2116 """
2108 2117
2109 2118 # autoindent management should be done here, and not in the
2110 2119 # interactive loop, since that one is only seen by keyboard input. We
2111 2120 # need this done correctly even for code run via runlines (which uses
2112 2121 # push).
2113 2122
2114 2123 #print 'push line: <%s>' % line # dbg
2115 2124 for subline in line.splitlines():
2116 2125 self.autoindent_update(subline)
2117 2126 self.buffer.append(line)
2118 2127 more = self.runsource('\n'.join(self.buffer), self.filename)
2119 2128 if not more:
2120 2129 self.resetbuffer()
2121 2130 return more
2122 2131
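    # Illustrative sketch: feeding a block to push() one line at a time and
    # using the return value to know when it is complete, mirroring what
    # runlines() does above.
    #
    #   shell = _ip.IP
    #   more = False
    #   for ln in ['for i in range(3):', '    print i', '']:
    #       more = shell.push(shell.prefilter(ln, more))
    #   # 'more' is true after the first two lines and false once the final
    #   # blank line completes the block and it executes.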
2123 2132 def split_user_input(self, line):
2124 2133 # This is really a hold-over to support ipapi and some extensions
2125 2134 return prefilter.splitUserInput(line)
2126 2135
2127 2136 def resetbuffer(self):
2128 2137 """Reset the input buffer."""
2129 2138 self.buffer[:] = []
2130 2139
2131 2140 def raw_input(self,prompt='',continue_prompt=False):
2132 2141 """Write a prompt and read a line.
2133 2142
2134 2143 The returned line does not include the trailing newline.
2135 2144 When the user enters the EOF key sequence, EOFError is raised.
2136 2145
2137 2146 Optional inputs:
2138 2147
2139 2148 - prompt(''): a string to be printed to prompt the user.
2140 2149
2141 2150 - continue_prompt(False): whether this line is the first one or a
2142 2151 continuation in a sequence of inputs.
2143 2152 """
2144 2153
2145 2154 # Code run by the user may have modified the readline completer state.
2146 2155 # We must ensure that our completer is back in place.
2147 2156 if self.has_readline:
2148 2157 self.set_completer()
2149 2158
2150 2159 try:
2151 2160 line = raw_input_original(prompt).decode(self.stdin_encoding)
2152 2161 except ValueError:
2153 2162 warn("\n********\nYou or a %run:ed script called sys.stdin.close()"
2154 2163 " or sys.stdout.close()!\nExiting IPython!")
2155 2164 self.ask_exit()
2156 2165 return ""
2157 2166
2158 2167 # Try to be reasonably smart about not re-indenting pasted input more
2159 2168 # than necessary. We do this by trimming out the auto-indent initial
2160 2169 # spaces, if the user's actual input started itself with whitespace.
2161 2170 #debugx('self.buffer[-1]')
2162 2171
2163 2172 if self.autoindent:
2164 2173 if num_ini_spaces(line) > self.indent_current_nsp:
2165 2174 line = line[self.indent_current_nsp:]
2166 2175 self.indent_current_nsp = 0
2167 2176
2168 2177 # store the unfiltered input before the user has any chance to modify
2169 2178 # it.
2170 2179 if line.strip():
2171 2180 if continue_prompt:
2172 2181 self.input_hist_raw[-1] += '%s\n' % line
2173 2182 if self.has_readline: # and some config option is set?
2174 2183 try:
2175 2184 histlen = self.readline.get_current_history_length()
2176 2185 if histlen > 1:
2177 2186 newhist = self.input_hist_raw[-1].rstrip()
2178 2187 self.readline.remove_history_item(histlen-1)
2179 2188 self.readline.replace_history_item(histlen-2,
2180 2189 newhist.encode(self.stdin_encoding))
2181 2190 except AttributeError:
2182 2191 pass # re{move,place}_history_item are new in 2.4.
2183 2192 else:
2184 2193 self.input_hist_raw.append('%s\n' % line)
2185 2194 # only entries starting at first column go to shadow history
2186 2195 if line.lstrip() == line:
2187 2196 self.shadowhist.add(line.strip())
2188 2197 elif not continue_prompt:
2189 2198 self.input_hist_raw.append('\n')
2190 2199 try:
2191 2200 lineout = self.prefilter(line,continue_prompt)
2192 2201 except:
2193 2202 # blanket except, in case a user-defined prefilter crashes, so it
2194 2203 # can't take all of ipython with it.
2195 2204 self.showtraceback()
2196 2205 return ''
2197 2206 else:
2198 2207 return lineout
2199 2208
2200 2209 def _prefilter(self, line, continue_prompt):
2201 2210 """Calls different preprocessors, depending on the form of line."""
2202 2211
2203 2212 # All handlers *must* return a value, even if it's blank ('').
2204 2213
2205 2214 # Lines are NOT logged here. Handlers should process the line as
2206 2215 # needed, update the cache AND log it (so that the input cache array
2207 2216 # stays synced).
2208 2217
2209 2218 #.....................................................................
2210 2219 # Code begins
2211 2220
2212 2221 #if line.startswith('%crash'): raise RuntimeError,'Crash now!' # dbg
2213 2222
2214 2223 # save the line away in case we crash, so the post-mortem handler can
2215 2224 # record it
2216 2225 self._last_input_line = line
2217 2226
2218 2227 #print '***line: <%s>' % line # dbg
2219 2228
2220 2229 if not line:
2221 2230 # Return immediately on purely empty lines, so that if the user
2222 2231 # previously typed some whitespace that started a continuation
2223 2232 # prompt, he can break out of that loop with just an empty line.
2224 2233 # This is how the default python prompt works.
2225 2234
2226 2235 # Only return if the accumulated input buffer was just whitespace!
2227 2236 if ''.join(self.buffer).isspace():
2228 2237 self.buffer[:] = []
2229 2238 return ''
2230 2239
2231 2240 line_info = prefilter.LineInfo(line, continue_prompt)
2232 2241
2233 2242 # the input history needs to track even empty lines
2234 2243 stripped = line.strip()
2235 2244
2236 2245 if not stripped:
2237 2246 if not continue_prompt:
2238 2247 self.outputcache.prompt_count -= 1
2239 2248 return self.handle_normal(line_info)
2240 2249
2241 2250 # print '***cont',continue_prompt # dbg
2242 2251 # special handlers are only allowed for single line statements
2243 2252 if continue_prompt and not self.rc.multi_line_specials:
2244 2253 return self.handle_normal(line_info)
2245 2254
2246 2255
2247 2256 # See whether any pre-existing handler can take care of it
2248 2257 rewritten = self.hooks.input_prefilter(stripped)
2249 2258 if rewritten != stripped: # ok, some prefilter did something
2250 2259 rewritten = line_info.pre + rewritten # add indentation
2251 2260 return self.handle_normal(prefilter.LineInfo(rewritten,
2252 2261 continue_prompt))
2253 2262
2254 2263 #print 'pre <%s> iFun <%s> rest <%s>' % (pre,iFun,theRest) # dbg
2255 2264
2256 2265 return prefilter.prefilter(line_info, self)
2257 2266
2258 2267
2259 2268 def _prefilter_dumb(self, line, continue_prompt):
2260 2269 """simple prefilter function, for debugging"""
2261 2270 return self.handle_normal(line,continue_prompt)
2262 2271
2263 2272
2264 2273 def multiline_prefilter(self, line, continue_prompt):
2265 2274 """ Run _prefilter for each line of input
2266 2275
2267 2276 Covers cases where there are multiple lines in the user entry,
2268 2277 which is the case when the user goes back to a multiline history
2269 2278 entry and presses enter.
2270 2279
2271 2280 """
2272 2281 out = []
2273 2282 for l in line.rstrip('\n').split('\n'):
2274 2283 out.append(self._prefilter(l, continue_prompt))
2275 2284 return '\n'.join(out)
2276 2285
2277 2286 # Set the default prefilter() function (this can be user-overridden)
2278 2287 prefilter = multiline_prefilter
2279 2288
2280 2289 def handle_normal(self,line_info):
2281 2290 """Handle normal input lines. Use as a template for handlers."""
2282 2291
2283 2292 # With autoindent on, we need some way to exit the input loop, and I
2284 2293 # don't want to force the user to have to backspace all the way to
2285 2294 # clear the line. The rule will be in this case, that either two
2286 2295         # clear the line. The rule in this case will be that either two
2287 2296         # lines of pure whitespace in a row, or a line of pure whitespace but
2288 2297         # of a size different from the indent level, will exit the input loop.
2289 2298 continue_prompt = line_info.continue_prompt
2290 2299
2291 2300 if (continue_prompt and self.autoindent and line.isspace() and
2292 2301 (0 < abs(len(line) - self.indent_current_nsp) <= 2 or
2293 2302 (self.buffer[-1]).isspace() )):
2294 2303 line = ''
2295 2304
2296 2305 self.log(line,line,continue_prompt)
2297 2306 return line
2298 2307
2299 2308 def handle_alias(self,line_info):
2300 2309 """Handle alias input lines. """
2301 2310 tgt = self.alias_table[line_info.iFun]
2302 2311 # print "=>",tgt #dbg
2303 2312 if callable(tgt):
2304 2313 if '$' in line_info.line:
2305 2314 call_meth = '(_ip, _ip.itpl(%s))'
2306 2315 else:
2307 2316 call_meth = '(_ip,%s)'
2308 2317 line_out = ("%s_sh.%s" + call_meth) % (line_info.preWhitespace,
2309 2318 line_info.iFun,
2310 2319 make_quoted_expr(line_info.line))
2311 2320 else:
2312 2321 transformed = self.expand_aliases(line_info.iFun,line_info.theRest)
2313 2322
2314 2323 # pre is needed, because it carries the leading whitespace. Otherwise
2315 2324 # aliases won't work in indented sections.
2316 2325 line_out = '%s_ip.system(%s)' % (line_info.preWhitespace,
2317 2326 make_quoted_expr( transformed ))
2318 2327
2319 2328 self.log(line_info.line,line_out,line_info.continue_prompt)
2320 2329 #print 'line out:',line_out # dbg
2321 2330 return line_out
2322 2331
2323 2332 def handle_shell_escape(self, line_info):
2324 2333 """Execute the line in a shell, empty return value"""
2325 2334 #print 'line in :', `line` # dbg
2326 2335 line = line_info.line
2327 2336 if line.lstrip().startswith('!!'):
2328 2337 # rewrite LineInfo's line, iFun and theRest to properly hold the
2329 2338 # call to %sx and the actual command to be executed, so
2330 2339 # handle_magic can work correctly. Note that this works even if
2331 2340 # the line is indented, so it handles multi_line_specials
2332 2341 # properly.
2333 2342 new_rest = line.lstrip()[2:]
2334 2343 line_info.line = '%ssx %s' % (self.ESC_MAGIC,new_rest)
2335 2344 line_info.iFun = 'sx'
2336 2345 line_info.theRest = new_rest
2337 2346 return self.handle_magic(line_info)
2338 2347 else:
2339 2348 cmd = line.lstrip().lstrip('!')
2340 2349 line_out = '%s_ip.system(%s)' % (line_info.preWhitespace,
2341 2350 make_quoted_expr(cmd))
2342 2351 # update cache/log and return
2343 2352 self.log(line,line_out,line_info.continue_prompt)
2344 2353 return line_out
2345 2354
2346 2355 def handle_magic(self, line_info):
2347 2356 """Execute magic functions."""
2348 2357 iFun = line_info.iFun
2349 2358 theRest = line_info.theRest
2350 2359 cmd = '%s_ip.magic(%s)' % (line_info.preWhitespace,
2351 2360 make_quoted_expr(iFun + " " + theRest))
2352 2361 self.log(line_info.line,cmd,line_info.continue_prompt)
2353 2362 #print 'in handle_magic, cmd=<%s>' % cmd # dbg
2354 2363 return cmd
2355 2364
2356 2365 def handle_auto(self, line_info):
2357 2366         """Handle lines which can be auto-executed, quoting if requested."""
2358 2367
2359 2368 line = line_info.line
2360 2369 iFun = line_info.iFun
2361 2370 theRest = line_info.theRest
2362 2371 pre = line_info.pre
2363 2372 continue_prompt = line_info.continue_prompt
2364 2373 obj = line_info.ofind(self)['obj']
2365 2374
2366 2375 #print 'pre <%s> iFun <%s> rest <%s>' % (pre,iFun,theRest) # dbg
2367 2376
2368 2377 # This should only be active for single-line input!
2369 2378 if continue_prompt:
2370 2379 self.log(line,line,continue_prompt)
2371 2380 return line
2372 2381
2373 2382 force_auto = isinstance(obj, IPython.ipapi.IPyAutocall)
2374 2383 auto_rewrite = True
2375 2384
2376 2385 if pre == self.ESC_QUOTE:
2377 2386 # Auto-quote splitting on whitespace
2378 2387 newcmd = '%s("%s")' % (iFun,'", "'.join(theRest.split()) )
2379 2388 elif pre == self.ESC_QUOTE2:
2380 2389 # Auto-quote whole string
2381 2390 newcmd = '%s("%s")' % (iFun,theRest)
2382 2391 elif pre == self.ESC_PAREN:
2383 2392 newcmd = '%s(%s)' % (iFun,",".join(theRest.split()))
2384 2393 else:
2385 2394 # Auto-paren.
2386 2395 # We only apply it to argument-less calls if the autocall
2387 2396 # parameter is set to 2. We only need to check that autocall is <
2388 2397 # 2, since this function isn't called unless it's at least 1.
2389 2398 if not theRest and (self.rc.autocall < 2) and not force_auto:
2390 2399 newcmd = '%s %s' % (iFun,theRest)
2391 2400 auto_rewrite = False
2392 2401 else:
2393 2402 if not force_auto and theRest.startswith('['):
2394 2403 if hasattr(obj,'__getitem__'):
2395 2404 # Don't autocall in this case: item access for an object
2396 2405 # which is BOTH callable and implements __getitem__.
2397 2406 newcmd = '%s %s' % (iFun,theRest)
2398 2407 auto_rewrite = False
2399 2408 else:
2400 2409 # if the object doesn't support [] access, go ahead and
2401 2410 # autocall
2402 2411 newcmd = '%s(%s)' % (iFun.rstrip(),theRest)
2403 2412 elif theRest.endswith(';'):
2404 2413 newcmd = '%s(%s);' % (iFun.rstrip(),theRest[:-1])
2405 2414 else:
2406 2415 newcmd = '%s(%s)' % (iFun.rstrip(), theRest)
2407 2416
2408 2417 if auto_rewrite:
2409 2418 rw = self.outputcache.prompt1.auto_rewrite() + newcmd
2410 2419
2411 2420 try:
2412 2421 # plain ascii works better w/ pyreadline, on some machines, so
2413 2422 # we use it and only print uncolored rewrite if we have unicode
2414 2423 rw = str(rw)
2415 2424 print >>Term.cout, rw
2416 2425 except UnicodeEncodeError:
2417 2426 print "-------------->" + newcmd
2418 2427
2419 2428 # log what is now valid Python, not the actual user input (without the
2420 2429 # final newline)
2421 2430 self.log(line,newcmd,continue_prompt)
2422 2431 return newcmd
2423 2432
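For illustration, a minimal sketch of the rewrites handle_auto performs, assuming IPython's default escape characters (',' for ESC_QUOTE, ';' for ESC_QUOTE2, '/' for ESC_PAREN); `my_func` is a hypothetical callable and only the string transformation is shown:

# Hedged sketch: mapping of typed input to the rewritten command, assuming the
# default escapes ',' (quote and split), ';' (quote whole string), '/' (parens).
examples = [
    (',my_func a b', 'my_func("a", "b")'),  # auto-quote, splitting on whitespace
    (';my_func a b', 'my_func("a b")'),     # auto-quote the whole argument string
    ('/my_func a b', 'my_func(a,b)'),       # explicit auto-paren
]
for typed, rewritten in examples:
    print '%-15s -> %s' % (typed, rewritten)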
2424 2433 def handle_help(self, line_info):
2425 2434 """Try to get some help for the object.
2426 2435
2427 2436 obj? or ?obj -> basic information.
2428 2437 obj?? or ??obj -> more details.
2429 2438 """
2430 2439
2431 2440 line = line_info.line
2432 2441 # We need to make sure that we don't process lines which would be
2433 2442 # otherwise valid python, such as "x=1 # what?"
2434 2443 try:
2435 2444 codeop.compile_command(line)
2436 2445 except SyntaxError:
2437 2446 # We should only handle as help requests things which are NOT valid syntax
2438 2447 if line[0]==self.ESC_HELP:
2439 2448 line = line[1:]
2440 2449 elif line[-1]==self.ESC_HELP:
2441 2450 line = line[:-1]
2442 2451 self.log(line,'#?'+line,line_info.continue_prompt)
2443 2452 if line:
2444 2453 #print 'line:<%r>' % line # dbg
2445 2454 self.magic_pinfo(line)
2446 2455 else:
2447 2456 page(self.usage,screen_lines=self.rc.screen_length)
2448 2457 return '' # Empty string is needed here!
2449 2458 except:
2450 2459 # Pass any other exceptions through to the normal handler
2451 2460 return self.handle_normal(line_info)
2452 2461 else:
2453 2462 # If the code compiles ok, we should handle it normally
2454 2463 return self.handle_normal(line_info)
2455 2464
2456 2465 def getapi(self):
2457 2466 """ Get an IPApi object for this shell instance
2458 2467
2459 2468 Getting an IPApi object is always preferable to accessing the shell
2460 2469 directly, but this holds true especially for extensions.
2461 2470
2462 2471 It should always be possible to implement an extension with IPApi
2463 2472 alone. If not, contact the maintainer to request an addition.
2464 2473
2465 2474 """
2466 2475 return self.api
2467 2476
2468 2477 def handle_emacs(self, line_info):
2469 2478 """Handle input lines marked by python-mode."""
2470 2479
2471 2480 # Currently, nothing is done. Later more functionality can be added
2472 2481 # here if needed.
2473 2482
2474 2483 # The input cache shouldn't be updated
2475 2484 return line_info.line
2476 2485
2477 2486
2478 2487 def mktempfile(self,data=None):
2479 2488 """Make a new tempfile and return its filename.
2480 2489
2481 2490 This makes a call to tempfile.mktemp, but it registers the created
2482 2491 filename internally so ipython cleans it up at exit time.
2483 2492
2484 2493 Optional inputs:
2485 2494
2486 2495 - data(None): if data is given, it gets written out to the temp file
2487 2496 immediately, and the file is closed again."""
2488 2497
2489 2498 filename = tempfile.mktemp('.py','ipython_edit_')
2490 2499 self.tempfiles.append(filename)
2491 2500
2492 2501 if data:
2493 2502 tmp_file = open(filename,'w')
2494 2503 tmp_file.write(data)
2495 2504 tmp_file.close()
2496 2505 return filename
2497 2506
2498 2507 def write(self,data):
2499 2508 """Write a string to the default output"""
2500 2509 Term.cout.write(data)
2501 2510
2502 2511 def write_err(self,data):
2503 2512 """Write a string to the default error output"""
2504 2513 Term.cerr.write(data)
2505 2514
2506 2515 def ask_exit(self):
2507 2516 """ Call for exiting. Can be overiden and used as a callback. """
2508 2517 self.exit_now = True
2509 2518
2510 2519 def exit(self):
2511 2520 """Handle interactive exit.
2512 2521
2513 2522 This method calls the ask_exit callback."""
2514 2523
2515 2524 if self.rc.confirm_exit:
2516 2525 if self.ask_yes_no('Do you really want to exit ([y]/n)?','y'):
2517 2526 self.ask_exit()
2518 2527 else:
2519 2528 self.ask_exit()
2520 2529
2521 2530 def safe_execfile(self,fname,*where,**kw):
2522 2531 """A safe version of the builtin execfile().
2523 2532
2524 2533 This version will never throw an exception, and knows how to handle
2525 2534 ipython logs as well.
2526 2535
2527 2536 :Parameters:
2528 2537 fname : string
2529 2538 Name of the file to be executed.
2530 2539
2531 2540 where : tuple
2532 2541 One or two namespaces, passed to execfile() as (globals,locals).
2533 2542 If only one is given, it is passed as both.
2534 2543
2535 2544 :Keywords:
2536 2545 islog : boolean (False)
2537 2546
2538 2547 quiet : boolean (True)
2539 2548
2540 2549 exit_ignore : boolean (False)
2541 2550 """
2542 2551
2543 2552 def syspath_cleanup():
2544 2553 """Internal cleanup routine for sys.path."""
2545 2554 if add_dname:
2546 2555 try:
2547 2556 sys.path.remove(dname)
2548 2557 except ValueError:
2549 2558 # For some reason the user has already removed it, ignore.
2550 2559 pass
2551 2560
2552 2561 fname = os.path.expanduser(fname)
2553 2562
2554 2563 # Find things also in current directory. This is needed to mimic the
2555 2564 # behavior of running a script from the system command line, where
2556 2565 # Python inserts the script's directory into sys.path
2557 2566 dname = os.path.dirname(os.path.abspath(fname))
2558 2567 add_dname = False
2559 2568 if dname not in sys.path:
2560 2569 sys.path.insert(0,dname)
2561 2570 add_dname = True
2562 2571
2563 2572 try:
2564 2573 xfile = open(fname)
2565 2574 except:
2566 2575 print >> Term.cerr, \
2567 2576 'Could not open file <%s> for safe execution.' % fname
2568 2577 syspath_cleanup()
2569 2578 return None
2570 2579
2571 2580 kw.setdefault('islog',0)
2572 2581 kw.setdefault('quiet',1)
2573 2582 kw.setdefault('exit_ignore',0)
2574 2583
2575 2584 first = xfile.readline()
2576 2585 loghead = str(self.loghead_tpl).split('\n',1)[0].strip()
2577 2586 xfile.close()
2578 2587 # line by line execution
2579 2588 if first.startswith(loghead) or kw['islog']:
2580 2589 print 'Loading log file <%s> one line at a time...' % fname
2581 2590 if kw['quiet']:
2582 2591 stdout_save = sys.stdout
2583 2592 sys.stdout = StringIO.StringIO()
2584 2593 try:
2585 2594 globs,locs = where[0:2]
2586 2595 except:
2587 2596 try:
2588 2597 globs = locs = where[0]
2589 2598 except:
2590 2599 globs = locs = globals()
2591 2600 badblocks = []
2592 2601
2593 2602 # we also need to identify indented blocks of code when replaying
2594 2603 # logs and put them together before passing them to an exec
2595 2604 # statement. This takes a bit of regexp and look-ahead work in the
2596 2605 # file. It's easiest if we swallow the whole thing in memory
2597 2606 # first, and manually walk through the lines list moving the
2598 2607 # counter ourselves.
2599 2608 indent_re = re.compile('\s+\S')
2600 2609 xfile = open(fname)
2601 2610 filelines = xfile.readlines()
2602 2611 xfile.close()
2603 2612 nlines = len(filelines)
2604 2613 lnum = 0
2605 2614 while lnum < nlines:
2606 2615 line = filelines[lnum]
2607 2616 lnum += 1
2608 2617 # don't re-insert logger status info into cache
2609 2618 if line.startswith('#log#'):
2610 2619 continue
2611 2620 else:
2612 2621 # build a block of code (maybe a single line) for execution
2613 2622 block = line
2614 2623 try:
2615 2624 next = filelines[lnum] # lnum has already incremented
2616 2625 except:
2617 2626 next = None
2618 2627 while next and indent_re.match(next):
2619 2628 block += next
2620 2629 lnum += 1
2621 2630 try:
2622 2631 next = filelines[lnum]
2623 2632 except:
2624 2633 next = None
2625 2634 # now execute the block of one or more lines
2626 2635 try:
2627 2636 exec block in globs,locs
2628 2637 except SystemExit:
2629 2638 pass
2630 2639 except:
2631 2640 badblocks.append(block.rstrip())
2632 2641 if kw['quiet']: # restore stdout
2633 2642 sys.stdout.close()
2634 2643 sys.stdout = stdout_save
2635 2644 print 'Finished replaying log file <%s>' % fname
2636 2645 if badblocks:
2637 2646 print >> sys.stderr, ('\nThe following lines/blocks in file '
2638 2647 '<%s> reported errors:' % fname)
2639 2648
2640 2649 for badline in badblocks:
2641 2650 print >> sys.stderr, badline
2642 2651 else: # regular file execution
2643 2652 try:
2644 2653 if sys.platform == 'win32' and sys.version_info < (2,5,1):
2645 2654 # Work around a bug in Python for Windows. The bug was
2646 2655 # fixed in Python 2.5 r54159 and 54158, but that's still
2647 2656 # SVN Python as of March/07. For details, see:
2648 2657 # http://projects.scipy.org/ipython/ipython/ticket/123
2649 2658 try:
2650 2659 globs,locs = where[0:2]
2651 2660 except:
2652 2661 try:
2653 2662 globs = locs = where[0]
2654 2663 except:
2655 2664 globs = locs = globals()
2656 2665 exec file(fname) in globs,locs
2657 2666 else:
2658 2667 execfile(fname,*where)
2659 2668 except SyntaxError:
2660 2669 self.showsyntaxerror()
2661 2670 warn('Failure executing file: <%s>' % fname)
2662 2671 except SystemExit,status:
2663 2672 # Code that correctly sets the exit status flag to success (0)
2664 2673 # shouldn't be bothered with a traceback. Note that a plain
2665 2674 # sys.exit() does NOT set the message to 0 (it's empty) so that
2666 2675 # will still get a traceback. Note that the structure of the
2667 2676 # SystemExit exception changed between Python 2.4 and 2.5, so
2668 2677 # the checks must be done in a version-dependent way.
2669 2678 show = False
2670 2679
2671 2680 if sys.version_info[:2] > (2,5):
2672 2681 if status.message!=0 and not kw['exit_ignore']:
2673 2682 show = True
2674 2683 else:
2675 2684 if status.code and not kw['exit_ignore']:
2676 2685 show = True
2677 2686 if show:
2678 2687 self.showtraceback()
2679 2688 warn('Failure executing file: <%s>' % fname)
2680 2689 except:
2681 2690 self.showtraceback()
2682 2691 warn('Failure executing file: <%s>' % fname)
2683 2692
2684 2693 syspath_cleanup()
2685 2694
2686 2695 #************************* end of file <iplib.py> *****************************
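A brief usage sketch of safe_execfile as defined above; `ip` is assumed to be the running interactive shell instance and both file names are hypothetical:

# Hedged usage sketch of safe_execfile (names are illustrative assumptions).
ns = {}
ip.safe_execfile('startup_script.py', ns)                 # one namespace: used as both globals and locals
ip.safe_execfile('old_session.py', ns, islog=1, quiet=1)  # replay an IPython log file line by line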
1 NO CONTENT: modified file chmod 100755 => 100644
@@ -1,125 +1,124
1 1 # encoding: utf-8
2 2
3 3 """Default kernel configuration."""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 from os.path import join as pjoin
19
18 20 from IPython.external.configobj import ConfigObj
19 21 from IPython.config.api import ConfigObjManager
20 from IPython.config.cutils import get_ipython_dir
22 from IPython.genutils import get_ipython_dir, get_security_dir
21 23
22 24 default_kernel_config = ConfigObj()
23 25
24 try:
25 ipython_dir = get_ipython_dir() + '/'
26 except:
27 # This will defaults to the cwd
28 ipython_dir = ''
26 security_dir = get_security_dir()
29 27
30 28 #-------------------------------------------------------------------------------
31 29 # Engine Configuration
32 30 #-------------------------------------------------------------------------------
33 31
34 32 engine_config = dict(
35 33 logfile = '', # Empty means log to stdout
36 furl_file = ipython_dir + 'ipcontroller-engine.furl'
34 furl_file = pjoin(security_dir, 'ipcontroller-engine.furl')
37 35 )
38 36
39 37 #-------------------------------------------------------------------------------
40 38 # MPI Configuration
41 39 #-------------------------------------------------------------------------------
42 40
43 41 mpi_config = dict(
44 42 mpi4py = """from mpi4py import MPI as mpi
45 43 mpi.size = mpi.COMM_WORLD.Get_size()
46 44 mpi.rank = mpi.COMM_WORLD.Get_rank()
47 45 """,
48 46 pytrilinos = """from PyTrilinos import Epetra
49 47 class SimpleStruct:
50 48 pass
51 49 mpi = SimpleStruct()
52 50 mpi.rank = 0
53 51 mpi.size = 0
54 52 """,
55 53 default = ''
56 54 )
57 55
58 56 #-------------------------------------------------------------------------------
59 57 # Controller Configuration
60 58 #-------------------------------------------------------------------------------
61 59
62 60 controller_config = dict(
63 61
64 62 logfile = '', # Empty means log to stdout
65 63 import_statement = '',
64 reuse_furls = False, # If False, old furl files are deleted
66 65
67 66 engine_tub = dict(
68 67 ip = '', # Empty string means all interfaces
69 68 port = 0, # 0 means pick a port for me
70 69 location = '', # Empty string means try to set automatically
71 70 secure = True,
72 cert_file = ipython_dir + 'ipcontroller-engine.pem',
71 cert_file = pjoin(security_dir, 'ipcontroller-engine.pem'),
73 72 ),
74 73 engine_fc_interface = 'IPython.kernel.enginefc.IFCControllerBase',
75 engine_furl_file = ipython_dir + 'ipcontroller-engine.furl',
74 engine_furl_file = pjoin(security_dir, 'ipcontroller-engine.furl'),
76 75
77 76 controller_interfaces = dict(
78 77 # multiengine = dict(
79 78 # controller_interface = 'IPython.kernel.multiengine.IMultiEngine',
80 79 # fc_interface = 'IPython.kernel.multienginefc.IFCMultiEngine',
81 80 # furl_file = 'ipcontroller-mec.furl'
82 81 # ),
83 82 task = dict(
84 83 controller_interface = 'IPython.kernel.task.ITaskController',
85 84 fc_interface = 'IPython.kernel.taskfc.IFCTaskController',
86 furl_file = ipython_dir + 'ipcontroller-tc.furl'
85 furl_file = pjoin(security_dir, 'ipcontroller-tc.furl')
87 86 ),
88 87 multiengine = dict(
89 88 controller_interface = 'IPython.kernel.multiengine.IMultiEngine',
90 89 fc_interface = 'IPython.kernel.multienginefc.IFCSynchronousMultiEngine',
91 furl_file = ipython_dir + 'ipcontroller-mec.furl'
90 furl_file = pjoin(security_dir, 'ipcontroller-mec.furl')
92 91 )
93 92 ),
94 93
95 94 client_tub = dict(
96 95 ip = '', # Empty string means all interfaces
97 96 port = 0, # 0 means pick a port for me
98 97 location = '', # Empty string means try to set automatically
99 98 secure = True,
100 cert_file = ipython_dir + 'ipcontroller-client.pem'
99 cert_file = pjoin(security_dir, 'ipcontroller-client.pem')
101 100 )
102 101 )
103 102
104 103 #-------------------------------------------------------------------------------
105 104 # Client Configuration
106 105 #-------------------------------------------------------------------------------
107 106
108 107 client_config = dict(
109 108 client_interfaces = dict(
110 109 task = dict(
111 furl_file = ipython_dir + 'ipcontroller-tc.furl'
110 furl_file = pjoin(security_dir, 'ipcontroller-tc.furl')
112 111 ),
113 112 multiengine = dict(
114 furl_file = ipython_dir + 'ipcontroller-mec.furl'
113 furl_file = pjoin(security_dir, 'ipcontroller-mec.furl')
115 114 )
116 115 )
117 116 )
118 117
119 118 default_kernel_config['engine'] = engine_config
120 119 default_kernel_config['mpi'] = mpi_config
121 120 default_kernel_config['controller'] = controller_config
122 121 default_kernel_config['client'] = client_config
123 122
124 123
125 124 config_manager = ConfigObjManager(default_kernel_config, 'IPython.kernel.ini') No newline at end of file
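Since ConfigObj instances behave like nested dicts, these defaults can be adjusted in code before the kernel components read them; a minimal sketch, with a hypothetical import path and illustrative values:

# Hedged sketch; the import path is hypothetical and the values are examples only.
from IPython.kernel.config import default_kernel_config

default_kernel_config['engine']['logfile'] = '/tmp/engine.log'  # log to a file instead of stdout
default_kernel_config['controller']['reuse_furls'] = True       # keep previously written furl files
print default_kernel_config['engine']['furl_file']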
@@ -1,143 +1,141
1 1 # encoding: utf-8
2 2 # -*- test-case-name: IPython.kernel.test.test_contexts -*-
3 3 """Context managers for IPython.
4 4
5 5 Python 2.5 introduced the `with` statement, which is based on the context
6 6 manager protocol. This module offers a few context managers for common cases,
7 7 which can also be useful as templates for writing new, application-specific
8 8 managers.
9 9 """
10 10
11 from __future__ import with_statement
12
13 11 __docformat__ = "restructuredtext en"
14 12
15 13 #-------------------------------------------------------------------------------
16 14 # Copyright (C) 2008 The IPython Development Team
17 15 #
18 16 # Distributed under the terms of the BSD License. The full license is in
19 17 # the file COPYING, distributed as part of this software.
20 18 #-------------------------------------------------------------------------------
21 19
22 20 #-------------------------------------------------------------------------------
23 21 # Imports
24 22 #-------------------------------------------------------------------------------
25 23
26 24 import linecache
27 25 import sys
28 26
29 27 from twisted.internet.error import ConnectionRefusedError
30 28
31 29 from IPython.ultraTB import _fixed_getinnerframes, findsource
32 30 from IPython import ipapi
33 31
34 32 from IPython.kernel import error
35 33
36 34 #---------------------------------------------------------------------------
37 35 # Utility functions needed by all context managers.
38 36 #---------------------------------------------------------------------------
39 37
40 38 def remote():
41 39 """Raises a special exception meant to be caught by context managers.
42 40 """
43 41 m = 'Special exception to stop local execution of parallel code.'
44 42 raise error.StopLocalExecution(m)
45 43
46 44
47 45 def strip_whitespace(source,require_remote=True):
48 46 """strip leading whitespace from input source.
49 47
50 48 :Parameters:
51 49
52 50 """
53 51 remote_mark = 'remote()'
54 52 # Expand tabs to avoid any confusion.
55 53 wsource = [l.expandtabs(4) for l in source]
56 54 # Detect the indentation level
57 55 done = False
58 56 for line in wsource:
59 57 if line.isspace():
60 58 continue
61 59 for col,char in enumerate(line):
62 60 if char != ' ':
63 61 done = True
64 62 break
65 63 if done:
66 64 break
67 65 # Now we know how much leading space there is in the code. Next, we
68 66 # extract up to the first line that has less indentation.
69 67 # WARNINGS: we skip comments that may be misindented, but we do NOT yet
70 68 # detect triple quoted strings that may have flush left text.
71 69 for lno,line in enumerate(wsource):
72 70 lead = line[:col]
73 71 if lead.isspace():
74 72 continue
75 73 else:
76 74 if not lead.lstrip().startswith('#'):
77 75 break
78 76 # The real 'with' source is up to lno
79 77 src_lines = [l[col:] for l in wsource[:lno+1]]
80 78
81 79 # Finally, check that the source's first non-comment line begins with the
82 80 # special call 'remote()'
83 81 if require_remote:
84 82 for nline,line in enumerate(src_lines):
85 83 if line.isspace() or line.startswith('#'):
86 84 continue
87 85 if line.startswith(remote_mark):
88 86 break
89 87 else:
90 88 raise ValueError('%s call missing at the start of code' %
91 89 remote_mark)
92 90 out_lines = src_lines[nline+1:]
93 91 else:
94 92 # If the user specified that the remote() call wasn't mandatory
95 93 out_lines = src_lines
96 94
97 95 # src = ''.join(out_lines) # dbg
98 96 #print 'SRC:\n<<<<<<<>>>>>>>\n%s<<<<<>>>>>>' % src # dbg
99 97 return ''.join(out_lines)
100 98
101 99 class RemoteContextBase(object):
102 100 def __init__(self):
103 101 self.ip = ipapi.get()
104 102
105 103 def _findsource_file(self,f):
106 104 linecache.checkcache()
107 105 s = findsource(f.f_code)
108 106 lnum = f.f_lineno
109 107 wsource = s[0][f.f_lineno:]
110 108 return strip_whitespace(wsource)
111 109
112 110 def _findsource_ipython(self,f):
113 111 from IPython import ipapi
114 112 self.ip = ipapi.get()
115 113 buf = self.ip.IP.input_hist_raw[-1].splitlines()[1:]
116 114 wsource = [l+'\n' for l in buf ]
117 115
118 116 return strip_whitespace(wsource)
119 117
120 118 def findsource(self,frame):
121 119 local_ns = frame.f_locals
122 120 global_ns = frame.f_globals
123 121 if frame.f_code.co_filename == '<ipython console>':
124 122 src = self._findsource_ipython(frame)
125 123 else:
126 124 src = self._findsource_file(frame)
127 125 return src
128 126
129 127 def __enter__(self):
130 128 raise NotImplementedError
131 129
132 130 def __exit__ (self, etype, value, tb):
133 131 if issubclass(etype,error.StopLocalExecution):
134 132 return True
135 133
136 134 class RemoteMultiEngine(RemoteContextBase):
137 135 def __init__(self,mec):
138 136 self.mec = mec
139 137 RemoteContextBase.__init__(self)
140 138
141 139 def __enter__(self):
142 140 src = self.findsource(sys._getframe(1))
143 141 return self.mec.execute(src)
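A usage sketch of the context manager defined above, assuming a connected multiengine client `mec`; the client import reflects the usual IPython.kernel entry point and is an assumption here:

# Hedged sketch of the RemoteMultiEngine context manager.
from __future__ import with_statement        # needed on Python 2.5
from IPython.kernel import client            # assumed client entry point
mec = client.MultiEngineClient()

with RemoteMultiEngine(mec):
    remote()            # required marker: stops local execution, the rest is sent to the engines
    import os
    pid = os.getpid()   # runs on each engine, not in the local process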
@@ -1,376 +1,376
1 1 # encoding: utf-8
2 2 # -*- test-case-name: IPython.kernel.test.test_controllerservice -*-
3 3
4 4 """A Twisted Service for the IPython Controller.
5 5
6 6 The IPython Controller:
7 7
8 8 * Listens for Engines to connect and then manages access to those engines.
9 9 * Listens for clients and passes commands from client to the Engines.
10 10 * Exposes an asynchronous interface to the Engines, which themselves can block.
11 11 * Acts as a gateway to the Engines.
12 12
13 13 The design of the controller is somewhat abstract to allow flexibility in how
14 14 the controller is presented to clients. The idea is that there is a basic
15 15 ControllerService class that allows engines to connect to it. But this
16 16 basic class has no client interfaces. To expose client interfaces, developers
17 17 provide an adapter that makes the ControllerService look like something else. For
18 18 example, one client interface might support task farming and another might
19 19 support interactive usage. The important thing is that by using interfaces
20 20 and adapters, a single controller can be accessed from multiple interfaces.
21 21 Furthermore, by adapting various client interfaces to various network
22 22 protocols, each client interface can be exposed to multiple network protocols.
23 23 See multiengine.py for an example of how to adapt the ControllerService
24 24 to a client interface.
25 25 """
26 26
27 27 __docformat__ = "restructuredtext en"
28 28
29 29 #-------------------------------------------------------------------------------
30 30 # Copyright (C) 2008 The IPython Development Team
31 31 #
32 32 # Distributed under the terms of the BSD License. The full license is in
33 33 # the file COPYING, distributed as part of this software.
34 34 #-------------------------------------------------------------------------------
35 35
36 36 #-------------------------------------------------------------------------------
37 37 # Imports
38 38 #-------------------------------------------------------------------------------
39 39
40 40 import os, sys
41 41
42 42 from twisted.application import service
43 43 from twisted.internet import defer, reactor
44 44 from twisted.python import log, components
45 45 from zope.interface import Interface, implements, Attribute
46 46 import zope.interface as zi
47 47
48 48 from IPython.kernel.engineservice import \
49 49 IEngineCore, \
50 50 IEngineSerialized, \
51 51 IEngineQueued
52 52
53 from IPython.config import cutils
53 from IPython.genutils import get_ipython_dir
54 54 from IPython.kernel import codeutil
55 55
56 56 #-------------------------------------------------------------------------------
57 57 # Interfaces for the Controller
58 58 #-------------------------------------------------------------------------------
59 59
60 60 class IControllerCore(Interface):
61 61 """Basic methods any controller must have.
62 62
63 63 This is basically the aspect of the controller relevant to the
64 64 engines and does not assume anything about how the engines will
65 65 be presented to a client.
66 66 """
67 67
68 68 engines = Attribute("A dict of engine ids and engine instances.")
69 69
70 70 def register_engine(remoteEngine, id=None, ip=None, port=None,
71 71 pid=None):
72 72 """Register new remote engine.
73 73
74 74 The controller can use the ip, port, pid of the engine to do useful things
75 75 like kill the engines.
76 76
77 77 :Parameters:
78 78 remoteEngine
79 79 An implementer of IEngineCore, IEngineSerialized and IEngineQueued.
80 80 id : int
81 81 Requested id.
82 82 ip : str
83 83 IP address the engine is running on.
84 84 port : int
85 85 Port the engine is on.
86 86 pid : int
87 87 pid of the running engine.
88 88
89 89 :Returns: A dict of {'id':id} and possibly other key, value pairs.
90 90 """
91 91
92 92 def unregister_engine(id):
93 93 """Handle a disconnecting engine.
94 94
95 95 :Parameters:
96 96 id
97 97 The integer engine id of the engine to unregister.
98 98 """
99 99
100 100 def on_register_engine_do(f, includeID, *args, **kwargs):
101 101 """Call ``f(*args, **kwargs)`` when an engine is registered.
102 102
103 103 :Parameters:
104 104 includeID : bool
105 105 If True, the first argument to f will be the id of the engine.
106 106 """
107 107
108 108 def on_unregister_engine_do(f, includeID, *args, **kwargs):
109 109 """Call ``f(*args, **kwargs)`` when an engine is unregistered.
110 110
111 111 :Parameters:
112 112 includeID : bool
113 113 If True, the first argument to f will be the id of the engine.
114 114 """
115 115
116 116 def on_register_engine_do_not(f):
117 117 """Stop calling f on engine registration"""
118 118
119 119 def on_unregister_engine_do_not(f):
120 120 """Stop calling f on engine unregistration"""
121 121
122 122 def on_n_engines_registered_do(n, f, *arg, **kwargs):
123 123 """Call f(*args, **kwargs) the first time the nth engine registers."""
124 124
125 125 class IControllerBase(IControllerCore):
126 126 """The basic controller interface."""
127 127 pass
128 128
129 129
130 130 #-------------------------------------------------------------------------------
131 131 # Implementation of the ControllerService
132 132 #-------------------------------------------------------------------------------
133 133
134 134 class ControllerService(object, service.Service):
135 135 """A basic Controller represented as a Twisted Service.
136 136
137 137 This class doesn't implement any client notification mechanism. That
138 138 is up to adapted subclasses.
139 139 """
140 140
141 141 # I also pick up the IService interface by inheritance from service.Service
142 142 implements(IControllerBase)
143 143 name = 'ControllerService'
144 144
145 145 def __init__(self, maxEngines=511, saveIDs=False):
146 146 self.saveIDs = saveIDs
147 147 self.engines = {}
148 148 self.availableIDs = range(maxEngines,-1,-1) # [511,...,0]
149 149 self._onRegister = []
150 150 self._onUnregister = []
151 151 self._onNRegistered = []
152 152
153 153 #---------------------------------------------------------------------------
154 154 # Methods used to save the engine info to a log file
155 155 #---------------------------------------------------------------------------
156 156
157 157 def _buildEngineInfoString(self, id, ip, port, pid):
158 158 if id is None:
159 159 id = -99
160 160 if ip is None:
161 161 ip = "-99"
162 162 if port is None:
163 163 port = -99
164 164 if pid is None:
165 165 pid = -99
166 166 return "Engine Info: %d %s %d %d" % (id, ip , port, pid)
167 167
168 168 def _logEngineInfo(self, id, ip, port, pid):
169 169 log.msg(self._buildEngineInfoString(id,ip,port,pid))
170 170
171 171 def _getEngineInfoLogFile(self):
172 172 # Store all logs inside the ipython directory
173 ipdir = cutils.get_ipython_dir()
173 ipdir = get_ipython_dir()
174 174 pjoin = os.path.join
175 175 logdir_base = pjoin(ipdir,'log')
176 176 if not os.path.isdir(logdir_base):
177 177 os.makedirs(logdir_base)
178 178 logfile = os.path.join(logdir_base,'ipcontroller-%s-engine-info.log' % os.getpid())
179 179 return logfile
180 180
181 181 def _logEngineInfoToFile(self, id, ip, port, pid):
182 182 """Log info about an engine to a log file.
183 183
184 184 When an engine registers with a ControllerService, the ControllerService
185 185 saves information about the engine to a log file. That information
186 186 can be useful for various purposes, such as killing hung engines, etc.
187 187
188 188 This method takes the assigned id, ip/port and pid of the engine
189 189 and saves it to a file of the form:
190 190
191 191 ~/.ipython/log/ipcontroller-###-engine-info.log
192 192
193 193 where ### is the pid of the controller.
194 194
195 195 Each line of this file has the form:
196 196
197 197 Engine Info: ip ip port pid
198 198
199 199 If any of the entries are not known, they are replaced by -99.
200 200 """
201 201
202 202 fname = self._getEngineInfoLogFile()
203 203 f = open(fname, 'a')
204 204 s = self._buildEngineInfoString(id,ip,port,pid)
205 205 f.write(s + '\n')
206 206 f.close()
207 207
208 208 #---------------------------------------------------------------------------
209 209 # IControllerCore methods
210 210 #---------------------------------------------------------------------------
211 211
212 212 def register_engine(self, remoteEngine, id=None,
213 213 ip=None, port=None, pid=None):
214 214 """Register new engine connection"""
215 215
216 216 # What happens if these assertions fail?
217 217 assert IEngineCore.providedBy(remoteEngine), \
218 218 "engine passed to register_engine doesn't provide IEngineCore"
219 219 assert IEngineSerialized.providedBy(remoteEngine), \
220 220 "engine passed to register_engine doesn't provide IEngineSerialized"
221 221 assert IEngineQueued.providedBy(remoteEngine), \
222 222 "engine passed to register_engine doesn't provide IEngineQueued"
223 223 assert isinstance(id, int) or id is None, \
224 224 "id to register_engine must be an integer or None"
225 225 assert isinstance(ip, str) or ip is None, \
226 226 "ip to register_engine must be a string or None"
227 227 assert isinstance(port, int) or port is None, \
228 228 "port to register_engine must be an integer or None"
229 229 assert isinstance(pid, int) or pid is None, \
230 230 "pid to register_engine must be an integer or None"
231 231
232 232 desiredID = id
233 233 if desiredID in self.engines.keys():
234 234 desiredID = None
235 235
236 236 if desiredID in self.availableIDs:
237 237 getID = desiredID
238 238 self.availableIDs.remove(desiredID)
239 239 else:
240 240 getID = self.availableIDs.pop()
241 241 remoteEngine.id = getID
242 242 remoteEngine.service = self
243 243 self.engines[getID] = remoteEngine
244 244
245 245 # Log the Engine Information for monitoring purposes
246 246 self._logEngineInfoToFile(getID, ip, port, pid)
247 247
248 248 msg = "registered engine with id: %i" %getID
249 249 log.msg(msg)
250 250
251 251 for i in range(len(self._onRegister)):
252 252 (f,args,kwargs,ifid) = self._onRegister[i]
253 253 try:
254 254 if ifid:
255 255 f(getID, *args, **kwargs)
256 256 else:
257 257 f(*args, **kwargs)
258 258 except:
259 259 self._onRegister.pop(i)
260 260
261 261 # Call functions when the nth engine is registered and then remove them
262 262 for i, (n, f, args, kwargs) in enumerate(self._onNRegistered):
263 263 if len(self.engines.keys()) == n:
264 264 try:
265 265 try:
266 266 f(*args, **kwargs)
267 267 except:
268 268 log.msg("Function %r failed when the %ith engine registered" % (f, n))
269 269 finally:
270 270 self._onNRegistered.pop(i)
271 271
272 272 return {'id':getID}
273 273
274 274 def unregister_engine(self, id):
275 275 """Unregister engine by id."""
276 276
277 277 assert isinstance(id, int) or id is None, \
278 278 "id to unregister_engine must be an integer or None"
279 279
280 280 msg = "unregistered engine with id: %i" %id
281 281 log.msg(msg)
282 282 try:
283 283 del self.engines[id]
284 284 except KeyError:
285 285 log.msg("engine with id %i was not registered" % id)
286 286 else:
287 287 if not self.saveIDs:
288 288 self.availableIDs.append(id)
289 289 # Sort to assign lower ids first
290 290 self.availableIDs.sort(reverse=True)
291 291 else:
292 292 log.msg("preserving id %i" %id)
293 293
294 294 for i in range(len(self._onUnregister)):
295 295 (f,args,kwargs,ifid) = self._onUnregister[i]
296 296 try:
297 297 if ifid:
298 298 f(id, *args, **kwargs)
299 299 else:
300 300 f(*args, **kwargs)
301 301 except:
302 302 self._onUnregister.pop(i)
303 303
304 304 def on_register_engine_do(self, f, includeID, *args, **kwargs):
305 305 assert callable(f), "f must be callable"
306 306 self._onRegister.append((f,args,kwargs,includeID))
307 307
308 308 def on_unregister_engine_do(self, f, includeID, *args, **kwargs):
309 309 assert callable(f), "f must be callable"
310 310 self._onUnregister.append((f,args,kwargs,includeID))
311 311
312 312 def on_register_engine_do_not(self, f):
313 313 for i in range(len(self._onRegister)):
314 314 g = self._onRegister[i][0]
315 315 if f == g:
316 316 self._onRegister.pop(i)
317 317 return
318 318
319 319 def on_unregister_engine_do_not(self, f):
320 320 for i in range(len(self._onUnregister)):
321 321 g = self._onUnregister[i][0]
322 322 if f == g:
323 323 self._onUnregister.pop(i)
324 324 return
325 325
326 326 def on_n_engines_registered_do(self, n, f, *args, **kwargs):
327 327 if len(self.engines.keys()) >= n:
328 328 f(*args, **kwargs)
329 329 else:
330 330 self._onNRegistered.append((n,f,args,kwargs))
331 331
332 332
333 333 #-------------------------------------------------------------------------------
334 334 # Base class for adapting controller to different client APIs
335 335 #-------------------------------------------------------------------------------
336 336
337 337 class ControllerAdapterBase(object):
338 338 """All Controller adapters should inherit from this class.
339 339
340 340 This class provides a wrapped version of the IControllerBase interface that
341 341 can be used to easily create new custom controllers. Subclasses of this
342 342 will provide a full implementation of IControllerBase.
343 343
344 344 This class doesn't implement any client notification mechanism. That
345 345 is up to subclasses.
346 346 """
347 347
348 348 implements(IControllerBase)
349 349
350 350 def __init__(self, controller):
351 351 self.controller = controller
352 352 # Needed for IControllerCore
353 353 self.engines = self.controller.engines
354 354
355 355 def register_engine(self, remoteEngine, id=None,
356 356 ip=None, port=None, pid=None):
357 357 return self.controller.register_engine(remoteEngine,
358 358 id, ip, port, pid)
359 359
360 360 def unregister_engine(self, id):
361 361 return self.controller.unregister_engine(id)
362 362
363 363 def on_register_engine_do(self, f, includeID, *args, **kwargs):
364 364 return self.controller.on_register_engine_do(f, includeID, *args, **kwargs)
365 365
366 366 def on_unregister_engine_do(self, f, includeID, *args, **kwargs):
367 367 return self.controller.on_unregister_engine_do(f, includeID, *args, **kwargs)
368 368
369 369 def on_register_engine_do_not(self, f):
370 370 return self.controller.on_register_engine_do_not(f)
371 371
372 372 def on_unregister_engine_do_not(self, f):
373 373 return self.controller.on_unregister_engine_do_not(f)
374 374
375 375 def on_n_engines_registered_do(self, n, f, *args, **kwargs):
376 376 return self.controller.on_n_engines_registered_do(n, f, *args, **kwargs)
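A small sketch of the registration hooks defined by IControllerCore/ControllerService above; the callback and its extra argument are illustrative:

# Hedged sketch of the registration callbacks (announce/"demo" are made-up names).
from twisted.python import log

def announce(engine_id, label):
    # includeID=True below means the engine id is passed as the first argument
    log.msg("%s: engine %i registered" % (label, engine_id))

cs = ControllerService(maxEngines=8)
cs.on_register_engine_do(announce, True, "demo")
cs.on_n_engines_registered_do(2, log.msg, "two engines are up")  # fires once, when the 2nd engine registers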
@@ -1,747 +1,754
1 1 # encoding: utf-8
2 2
3 3 """Central interpreter object for an IPython engine.
4 4
5 5 The interpreter is the object whose job is to process lines of user input and
6 6 actually execute them in the user's namespace.
7 7 """
8 8
9 9 __docformat__ = "restructuredtext en"
10 10
11 11 #-------------------------------------------------------------------------------
12 12 # Copyright (C) 2008 The IPython Development Team
13 13 #
14 14 # Distributed under the terms of the BSD License. The full license is in
15 15 # the file COPYING, distributed as part of this software.
16 16 #-------------------------------------------------------------------------------
17 17
18 18 #-------------------------------------------------------------------------------
19 19 # Imports
20 20 #-------------------------------------------------------------------------------
21 21
22 22 # Standard library imports.
23 23 from types import FunctionType
24 24
25 25 import __builtin__
26 26 import codeop
27 27 import compiler
28 28 import sys
29 29 import traceback
30 30
31 31 # Local imports.
32 32 from IPython.kernel.core import ultraTB
33 33 from IPython.kernel.core.display_trap import DisplayTrap
34 34 from IPython.kernel.core.macro import Macro
35 35 from IPython.kernel.core.prompts import CachedOutput
36 36 from IPython.kernel.core.traceback_trap import TracebackTrap
37 37 from IPython.kernel.core.util import Bunch, system_shell
38 38 from IPython.external.Itpl import ItplNS
39 39
40 40 # Global constants
41 41 COMPILER_ERROR = 'error'
42 42 INCOMPLETE_INPUT = 'incomplete'
43 43 COMPLETE_INPUT = 'complete'
44 44
45 45 ##############################################################################
46 46 # TEMPORARY!!! fake configuration, while we decide whether to use tconfig or
47 47 # not
48 48
49 49 rc = Bunch()
50 50 rc.cache_size = 100
51 51 rc.pprint = True
52 52 rc.separate_in = '\n'
53 53 rc.separate_out = '\n'
54 54 rc.separate_out2 = ''
55 55 rc.prompt_in1 = r'In [\#]: '
56 56 rc.prompt_in2 = r' .\\D.: '
57 57 rc.prompt_out = ''
58 58 rc.prompts_pad_left = False
59 59
60 60 ##############################################################################
61 61
62 62 # Top-level utilities
63 63 def default_display_formatters():
64 64 """ Return a list of default display formatters.
65 65 """
66 66
67 67 from display_formatter import PPrintDisplayFormatter, ReprDisplayFormatter
68 68 return [PPrintDisplayFormatter(), ReprDisplayFormatter()]
69 69
70 70 def default_traceback_formatters():
71 71 """ Return a list of default traceback formatters.
72 72 """
73 73
74 74 from traceback_formatter import PlainTracebackFormatter
75 75 return [PlainTracebackFormatter()]
76 76
77 77 # Top-level classes
78 78 class NotDefined(object): pass
79 79
80 80 class Interpreter(object):
81 81 """ An interpreter object.
82 82
83 83 fixme: needs to negotiate available formatters with frontends.
84 84
85 85 Important: the interpreter should be built so that it exposes a method
86 86 for each attribute/method of its sub-object. This way it can be
87 87 replaced by a network adapter.
88 88 """
89 89
90 90 def __init__(self, user_ns=None, global_ns=None,translator=None,
91 91 magic=None, display_formatters=None,
92 92 traceback_formatters=None, output_trap=None, history=None,
93 93 message_cache=None, filename='<string>', config=None):
94 94
95 95 # The local/global namespaces for code execution
96 96 local_ns = user_ns # compatibility name
97 97 if local_ns is None:
98 98 local_ns = {}
99 99 self.user_ns = local_ns
100 100 # The local namespace
101 101 if global_ns is None:
102 102 global_ns = {}
103 103 self.user_global_ns = global_ns
104 104
105 105 # An object that will translate commands into executable Python.
106 106 # The current translator does not work properly so for now we are going
107 107 # without!
108 108 # if translator is None:
109 109 # from IPython.kernel.core.translator import IPythonTranslator
110 110 # translator = IPythonTranslator()
111 111 self.translator = translator
112 112
113 113 # An object that maintains magic commands.
114 114 if magic is None:
115 115 from IPython.kernel.core.magic import Magic
116 116 magic = Magic(self)
117 117 self.magic = magic
118 118
119 119 # A list of formatters for the displayhook.
120 120 if display_formatters is None:
121 121 display_formatters = default_display_formatters()
122 122 self.display_formatters = display_formatters
123 123
124 124 # A list of formatters for tracebacks.
125 125 if traceback_formatters is None:
126 126 traceback_formatters = default_traceback_formatters()
127 127 self.traceback_formatters = traceback_formatters
128 128
129 129 # The object trapping stdout/stderr.
130 130 if output_trap is None:
131 131 from IPython.kernel.core.output_trap import OutputTrap
132 132 output_trap = OutputTrap()
133 133 self.output_trap = output_trap
134 134
135 135 # An object that manages the history.
136 136 if history is None:
137 137 from IPython.kernel.core.history import InterpreterHistory
138 138 history = InterpreterHistory()
139 139 self.history = history
140 140 self.get_history_item = history.get_history_item
141 141 self.get_history_input_cache = history.get_input_cache
142 142 self.get_history_input_after = history.get_input_after
143 143
144 144 # An object that caches all of the return messages.
145 145 if message_cache is None:
146 146 from IPython.kernel.core.message_cache import SimpleMessageCache
147 147 message_cache = SimpleMessageCache()
148 148 self.message_cache = message_cache
149 149
150 150 # The "filename" of the code that is executed in this interpreter.
151 151 self.filename = filename
152 152
153 153 # An object that contains much configuration information.
154 154 if config is None:
155 155 # fixme: Move this constant elsewhere!
156 156 config = Bunch(ESC_MAGIC='%')
157 157 self.config = config
158 158
159 159 # Hook managers.
160 160 # fixme: make the display callbacks configurable. In the meantime,
161 161 # enable macros.
162 162 self.display_trap = DisplayTrap(
163 163 formatters=self.display_formatters,
164 164 callbacks=[self._possible_macro],
165 165 )
166 166 self.traceback_trap = TracebackTrap(
167 167 formatters=self.traceback_formatters)
168 168
169 169 # This is used temporarily for reformatting exceptions in certain
170 170 # cases. It will go away once the ultraTB stuff is ported
171 171 # to ipython1
172 172 self.tbHandler = ultraTB.FormattedTB(color_scheme='NoColor',
173 173 mode='Context',
174 174 tb_offset=2)
175 175
176 176 # An object that can compile commands and remember __future__
177 177 # statements.
178 178 self.command_compiler = codeop.CommandCompiler()
179 179
180 180 # A replacement for the raw_input() and input() builtins. Change these
181 181 # attributes later to configure them.
182 182 self.raw_input_builtin = raw_input
183 183 self.input_builtin = input
184 184
185 185 # The number of the current cell.
186 186 self.current_cell_number = 1
187 187
188 188 # Initialize cache, set in/out prompts and printing system
189 189 self.outputcache = CachedOutput(self,
190 190 rc.cache_size,
191 191 rc.pprint,
192 192 input_sep = rc.separate_in,
193 193 output_sep = rc.separate_out,
194 194 output_sep2 = rc.separate_out2,
195 195 ps1 = rc.prompt_in1,
196 196 ps2 = rc.prompt_in2,
197 197 ps_out = rc.prompt_out,
198 198 pad_left = rc.prompts_pad_left)
199 199
200 200 # Need to decide later if this is the right approach, but clients
201 201 # commonly use sys.ps1/2, so it may be best to just set them here
202 202 sys.ps1 = self.outputcache.prompt1.p_str
203 203 sys.ps2 = self.outputcache.prompt2.p_str
204 204
205 205 # This is the message dictionary assigned temporarily when running the
206 206 # code.
207 207 self.message = None
208 208
209 209 self.setup_namespace()
210 210
211 211
212 212 #### Public 'Interpreter' interface ########################################
213 213
214 214 def formatTraceback(self, et, ev, tb, message=''):
215 215 """Put a formatted version of the traceback into value and reraise.
216 216
217 217 When exceptions have to be sent over the network, the traceback
218 218 needs to be put into the value of the exception in a nicely
219 219 formatted way. The method takes the type, value and tb of an
220 220 exception and puts a string representation of the tb into the
221 221 value of the exception and reraises it.
222 222
223 223 Currently this method uses the ultraTB formatter from IPython trunk.
224 224 Eventually it should simply use the traceback formatters in core
225 225 that are loaded into self.traceback_trap.formatters.
226 226 """
227 227 tbinfo = self.tbHandler.text(et,ev,tb)
228 228 ev._ipython_traceback_text = tbinfo
229 229 return et, ev, tb
230 230
231 231 def execute(self, commands, raiseException=True):
232 232 """ Execute some IPython commands.
233 233
234 234 1. Translate them into Python.
235 235 2. Run them.
236 236 3. Trap stdout/stderr.
237 237 4. Trap sys.displayhook().
238 238 5. Trap exceptions.
239 239 6. Return a message object.
240 240
241 241 Parameters
242 242 ----------
243 243 commands : str
244 244 The raw commands that the user typed into the prompt.
245 245
246 246 Returns
247 247 -------
248 248 message : dict
249 249 The dictionary of responses. See the README.txt in this directory
250 250 for an explanation of the format.
251 251 """
252 252
253 253 # Create a message dictionary with all of the information we will be
254 254 # returning to the frontend and other listeners.
255 255 message = self.setup_message()
256 256
257 257 # Massage the input and store the raw and translated commands into
258 258 # a dict.
259 259 user_input = dict(raw=commands)
260 260 if self.translator is not None:
261 261 python = self.translator(commands, message)
262 262 if python is None:
263 263 # Something went wrong with the translation. The translator
264 264 # should have added an appropriate entry to the message object.
265 265 return message
266 266 else:
267 267 python = commands
268 268 user_input['translated'] = python
269 269 message['input'] = user_input
270 270
271 271 # Set the message object so that any magics executed in the code have
272 272 # access.
273 273 self.message = message
274 274
275 275 # Set all of the output/exception traps.
276 276 self.set_traps()
277 277
278 278 # Actually execute the Python code.
279 279 status = self.execute_python(python)
280 280
281 281 # Unset all of the traps.
282 282 self.unset_traps()
283 283
284 284 # Unset the message object.
285 285 self.message = None
286 286
287 287 # Update the history variables in the namespace.
288 288 # E.g. In, Out, _, __, ___
289 289 if self.history is not None:
290 290 self.history.update_history(self, python)
291 291
292 292 # Let all of the traps contribute to the message and then clear their
293 293 # stored information.
294 294 self.output_trap.add_to_message(message)
295 295 self.output_trap.clear()
296 296 self.display_trap.add_to_message(message)
297 297 self.display_trap.clear()
298 298 self.traceback_trap.add_to_message(message)
299 299 # Pull out the type, value and tb of the current exception
300 300 # before clearing it.
301 301 einfo = self.traceback_trap.args
302 302 self.traceback_trap.clear()
303 303
304 304 # Cache the message.
305 305 self.message_cache.add_message(self.current_cell_number, message)
306 306
307 307 # Bump the number.
308 308 self.current_cell_number += 1
309 309
310 310 # This conditional lets the execute method either raise any
311 311 # exception that has occurred in user code OR return the message
312 312 # dict containing the traceback and other useful info.
313 313 if raiseException and einfo:
314 314 raise einfo[0],einfo[1],einfo[2]
315 315 else:
316 316 return message
317 317
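A brief usage sketch of execute(); the code string is an example and the keys printed are the ones populated in this file:

# Hedged sketch of Interpreter.execute() and the message dict it returns.
interp = Interpreter()
msg = interp.execute('a = 2 + 2', raiseException=False)
print msg['number']                # cell number assigned by setup_message()
print msg['input']['translated']   # the Python code that was actually run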
318 318 def generate_prompt(self, is_continuation):
319 319 """Calculate and return a string with the prompt to display.
320 320
321 321 :Parameters:
322 322 is_continuation : bool
323 323 Whether the input line is continuing multiline input or not, so
324 324 that a proper continuation prompt can be computed."""
325 325
326 326 if is_continuation:
327 327 return str(self.outputcache.prompt2)
328 328 else:
329 329 return str(self.outputcache.prompt1)
330 330
331 331 def execute_python(self, python):
332 332 """ Actually run the Python code in the namespace.
333 333
334 334 :Parameters:
335 335
336 336 python : str
337 337 Pure, exec'able Python code. Special IPython commands should have
338 338 already been translated into pure Python.
339 339 """
340 340
341 341 # We use a CommandCompiler instance to compile the code so as to keep
342 342 # track of __future__ imports.
343 343 try:
344 344 commands = self.split_commands(python)
345 345 except (SyntaxError, IndentationError), e:
346 346 # Save the exc_info so compilation related exceptions can be
347 347 # reraised
348 348 self.traceback_trap.args = sys.exc_info()
349 349 self.pack_exception(self.message,e)
350 350 return None
351 351
352 352 for cmd in commands:
353 353 try:
354 354 code = self.command_compiler(cmd, self.filename, 'single')
355 355 except (SyntaxError, OverflowError, ValueError), e:
356 356 self.traceback_trap.args = sys.exc_info()
357 357 self.pack_exception(self.message,e)
358 358 # No point in continuing if one block raised
359 359 return None
360 360 else:
361 361 self.execute_block(code)
362 362
363 363 def execute_block(self,code):
364 364 """Execute a single block of code in the user namespace.
365 365
366 366 Return value: a flag indicating whether the code to be run completed
367 367 successfully:
368 368
369 369 - 0: successful execution.
370 370 - 1: an error occurred.
371 371 """
372 372
373 373 outflag = 1 # start by assuming error, success will reset it
374 374 try:
375 375 exec code in self.user_ns
376 376 outflag = 0
377 377 except SystemExit:
378 378 self.resetbuffer()
379 379 self.traceback_trap.args = sys.exc_info()
380 380 except:
381 381 self.traceback_trap.args = sys.exc_info()
382 382
383 383 return outflag
384 384
385 385 def execute_macro(self, macro):
386 386 """ Execute the value of a macro.
387 387
388 388 Parameters
389 389 ----------
390 390 macro : Macro
391 391 """
392 392
393 393 python = macro.value
394 394 if self.translator is not None:
395 395 python = self.translator(python)
396 396 self.execute_python(python)
397 397
398 398 def getCommand(self, i=None):
399 399 """Gets the ith message in the message_cache.
400 400
401 401 This is implemented here for compatibility with the old ipython1 shell
402 402 I am not sure we need this though. I even seem to remember that we
403 403 were going to get rid of it.
404 404 """
405 405 return self.message_cache.get_message(i)
406 406
407 407 def reset(self):
408 408 """Reset the interpreter.
409 409
410 410 Currently this only resets the user's variables in the namespace.
411 411 In the future we might want to also reset the other stateful
412 412 things that the Interpreter has, like In, Out, etc.
413 413 """
414 414 self.user_ns.clear()
415 415 self.setup_namespace()
416 416
417 417 def complete(self,line,text=None, pos=None):
418 418 """Complete the given text.
419 419
420 420 :Parameters:
421 421
422 422 text : str
423 423 Text fragment to be completed on. Typically this is
424 424 """
425 425 # fixme: implement
426 426 raise NotImplementedError
427 427
428 428 def push(self, ns):
429 429 """ Put value into the namespace with name key.
430 430
431 431 Parameters
432 432 ----------
433 433 **kwds
434 434 """
435 435
436 436 self.user_ns.update(ns)
437 437
438 438 def push_function(self, ns):
439 439 # First set the func_globals for all functions to self.user_ns
440 440 new_kwds = {}
441 441 for k, v in ns.iteritems():
442 442 if not isinstance(v, FunctionType):
443 443 raise TypeError("function object expected")
444 444 new_kwds[k] = FunctionType(v.func_code, self.user_ns)
445 445 self.user_ns.update(new_kwds)
446 446
447 447 def pack_exception(self,message,exc):
448 448 message['exception'] = exc.__class__
449 449 message['exception_value'] = \
450 450 traceback.format_exception_only(exc.__class__, exc)
451 451
452 452 def feed_block(self, source, filename='<input>', symbol='single'):
453 453 """Compile some source in the interpreter.
454 454
455 455 One of several things can happen:
456 456
457 457 1) The input is incorrect; compile_command() raised an
458 458 exception (SyntaxError or OverflowError).
459 459
460 460 2) The input is incomplete, and more input is required;
461 461 compile_command() returned None. Nothing happens.
462 462
463 463 3) The input is complete; compile_command() returned a code
464 464 object. The code is executed by calling self.runcode() (which
465 465 also handles run-time exceptions, except for SystemExit).
466 466
467 467 The return value is:
468 468
469 469 - True in case 2
470 470
471 471 - False in the other cases, unless an exception is raised, where
472 472 None is returned instead. This can be used by external callers to
473 473 know whether to continue feeding input or not.
474 474
475 475 The return value can be used to decide whether to use sys.ps1 or
476 476 sys.ps2 to prompt the next line."""
477 477
478 478 self.message = self.setup_message()
479 479
480 480 try:
481 481 code = self.command_compiler(source,filename,symbol)
482 482 except (OverflowError, SyntaxError, IndentationError, ValueError ), e:
483 483 # Case 1
484 484 self.traceback_trap.args = sys.exc_info()
485 485 self.pack_exception(self.message,e)
486 486 return COMPILER_ERROR,False
487 487
488 488 if code is None:
489 489 # Case 2: incomplete input. This means that the input can span
490 490 # multiple lines. But we still need to decide when to actually
491 491 # stop taking user input. Later we'll add auto-indentation support
492 492 # somehow. In the meantime, we'll just stop if there are two lines
493 493 # of pure whitespace at the end.
494 494 last_two = source.rsplit('\n',2)[-2:]
495 495 print 'last two:',last_two # dbg
496 496 if len(last_two)==2 and all(s.isspace() for s in last_two):
497 497 return COMPLETE_INPUT,False
498 498 else:
499 499 return INCOMPLETE_INPUT, True
500 500 else:
501 501 # Case 3
502 502 return COMPLETE_INPUT, False
503 503
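The three cases above map directly onto feed_block's return values; a minimal sketch:

# Hedged sketch of feed_block's (status, more_input_needed) return values.
interp = Interpreter()
print interp.feed_block('1 + 1')               # -> ('complete', False)
print interp.feed_block('for i in range(3):')  # -> ('incomplete', True): keep feeding lines
print interp.feed_block('1 +')                 # -> ('error', False): SyntaxError packed into the message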
504 504 def pull(self, keys):
505 505 """ Get an item out of the namespace by key.
506 506
507 507 Parameters
508 508 ----------
509 509 key : str
510 510
511 511 Returns
512 512 -------
513 513 value : object
514 514
515 515 Raises
516 516 ------
517 517 TypeError if the key is not a string.
518 518 NameError if the object doesn't exist.
519 519 """
520 520
521 521 if isinstance(keys, str):
522 522 result = self.user_ns.get(keys, NotDefined())
523 523 if isinstance(result, NotDefined):
524 524 raise NameError('name %s is not defined' % keys)
525 525 elif isinstance(keys, (list, tuple)):
526 526 result = []
527 527 for key in keys:
528 528 if not isinstance(key, str):
529 529 raise TypeError("objects must be keyed by strings.")
530 530 else:
531 531 r = self.user_ns.get(key, NotDefined())
532 532 if isinstance(r, NotDefined):
533 533 raise NameError('name %s is not defined' % key)
534 534 else:
535 535 result.append(r)
536 536 if len(keys)==1:
537 537 result = result[0]
538 538 else:
539 539 raise TypeError("keys must be a strong or a list/tuple of strings")
540 540 return result
541 541
542 542 def pull_function(self, keys):
543 543 return self.pull(keys)
544 544
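A short sketch of moving values in and out of the user namespace with push/pull as defined above:

# Hedged sketch of push()/pull() on the user namespace.
interp = Interpreter()
interp.push(dict(x=3, y=4))
print interp.pull('x')         # -> 3
print interp.pull(['x', 'y'])  # -> [3, 4]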
545 545 #### Interactive user API ##################################################
546 546
547 547 def ipsystem(self, command):
548 548 """ Execute a command in a system shell while expanding variables in the
549 549 current namespace.
550 550
551 551 Parameters
552 552 ----------
553 553 command : str
554 554 """
555 555
556 556 # Expand $variables.
557 557 command = self.var_expand(command)
558 558
559 559 system_shell(command,
560 560 header='IPython system call: ',
561 561 verbose=self.rc.system_verbose,
562 562 )
563 563
564 564 def ipmagic(self, arg_string):
565 565 """ Call a magic function by name.
566 566
567 567 ipmagic('name -opt foo bar') is equivalent to typing at the ipython
568 568 prompt:
569 569
570 570 In[1]: %name -opt foo bar
571 571
572 572 To call a magic without arguments, simply use ipmagic('name').
573 573
574 574 This provides a proper Python function to call IPython's magics in any
575 575 valid Python code you can type at the interpreter, including loops and
576 576 compound statements. It is added by IPython to the Python builtin
577 577 namespace upon initialization.
578 578
579 579 Parameters
580 580 ----------
581 581 arg_string : str
582 582 A string containing the name of the magic function to call and any
583 583 additional arguments to be passed to the magic.
584 584
585 585 Returns
586 586 -------
587 587 something : object
588 588 The return value of the actual object.
589 589 """
590 590
591 591 # Taken from IPython.
592 592 raise NotImplementedError('Not ported yet')
593 593
594 594 args = arg_string.split(' ', 1)
595 595 magic_name = args[0]
596 596 magic_name = magic_name.lstrip(self.config.ESC_MAGIC)
597 597
598 598 try:
599 599 magic_args = args[1]
600 600 except IndexError:
601 601 magic_args = ''
602 602 fn = getattr(self.magic, 'magic_'+magic_name, None)
603 603 if fn is None:
604 604 self.error("Magic function `%s` not found." % magic_name)
605 605 else:
606 606 magic_args = self.var_expand(magic_args)
607 607 return fn(magic_args)
608 608
609 609
610 610 #### Private 'Interpreter' interface #######################################
611 611
612 612 def setup_message(self):
613 613 """Return a message object.
614 614
615 615 This method prepares and returns a message dictionary. This dict
616 616 contains the various fields that are used to transfer information about
617 617 execution, results, tracebacks, etc, to clients (either in or out of
618 618 process ones). Because of the need to work with possibly out of
619 619 process clients, this dict MUST contain strictly pickle-safe values.
620 620 """
621 621
622 622 return dict(number=self.current_cell_number)
623 623
624 624 def setup_namespace(self):
625 625 """ Add things to the namespace.
626 626 """
627 627
628 628 self.user_ns.setdefault('__name__', '__main__')
629 629 self.user_ns.setdefault('__builtins__', __builtin__)
630 630 self.user_ns['__IP'] = self
631 631 if self.raw_input_builtin is not None:
632 632 self.user_ns['raw_input'] = self.raw_input_builtin
633 633 if self.input_builtin is not None:
634 634 self.user_ns['input'] = self.input_builtin
635 635
636 636 builtin_additions = dict(
637 637 ipmagic=self.ipmagic,
638 638 )
639 639 __builtin__.__dict__.update(builtin_additions)
640 640
641 641 if self.history is not None:
642 642 self.history.setup_namespace(self.user_ns)
643 643
644 644 def set_traps(self):
645 645 """ Set all of the output, display, and traceback traps.
646 646 """
647 647
648 648 self.output_trap.set()
649 649 self.display_trap.set()
650 650 self.traceback_trap.set()
651 651
652 652 def unset_traps(self):
653 653 """ Unset all of the output, display, and traceback traps.
654 654 """
655 655
656 656 self.output_trap.unset()
657 657 self.display_trap.unset()
658 658 self.traceback_trap.unset()
659 659
660 660 def split_commands(self, python):
661 661 """ Split multiple lines of code into discrete commands that can be
662 662 executed singly.
663 663
664 664 Parameters
665 665 ----------
666 666 python : str
667 667 Pure, exec'able Python code.
668 668
669 669 Returns
670 670 -------
671 671 commands : list of str
672 672 Separate commands that can be exec'ed independently.
673 673 """
674 674
675 675 # compiler.parse treats trailing spaces after a newline as a
676 676         # SyntaxError. This is different from codeop.CommandCompiler, which
677 677         # will compile the trailing spaces just fine. We simply strip any
678 678 # trailing whitespace off. Passing a string with trailing whitespace
679 679 # to exec will fail however. There seems to be some inconsistency in
680 680 # how trailing whitespace is handled, but this seems to work.
681 681 python = python.strip()
682 682
683         # The compiler module does not like unicode. We need to
684         # encode it first:
685 if isinstance(python, unicode):
686             # Use the utf-8-sig BOM so the compiler detects this as a
687             # UTF-8 encoded string.
688 python = '\xef\xbb\xbf' + python.encode('utf-8')
689
683 690 # The compiler module will parse the code into an abstract syntax tree.
684 691 ast = compiler.parse(python)
685 692
686 693 # Uncomment to help debug the ast tree
687 694 # for n in ast.node:
688 695 # print n.lineno,'->',n
689 696
690 697 # Each separate command is available by iterating over ast.node. The
691 698         # lineno attribute is the line number (1-indexed) of the line that begins the
692 699         # command's suite.
693 700         # Lines ending with ";" yield a Discard node that doesn't have a lineno
694 701 # attribute. These nodes can and should be discarded. But there are
695 702 # other situations that cause Discard nodes that shouldn't be discarded.
696 703 # We might eventually discover other cases where lineno is None and have
697 704 # to put in a more sophisticated test.
698 705 linenos = [x.lineno-1 for x in ast.node if x.lineno is not None]
699 706
700 707 # When we finally get the slices, we will need to slice all the way to
701 708 # the end even though we don't have a line number for it. Fortunately,
702 709 # None does the job nicely.
703 710 linenos.append(None)
704 711 lines = python.splitlines()
705 712
706 713 # Create a list of atomic commands.
707 714 cmds = []
708 715 for i, j in zip(linenos[:-1], linenos[1:]):
709 716 cmd = lines[i:j]
710 717 if cmd:
711 718 cmds.append('\n'.join(cmd)+'\n')
712 719
713 720 return cmds
714 721
715 722 def error(self, text):
716 723 """ Pass an error message back to the shell.
717 724
718 725 Preconditions
719 726 -------------
720 727 This should only be called when self.message is set. In other words,
721 728 when code is being executed.
722 729
723 730 Parameters
724 731 ----------
725 732 text : str
726 733 """
727 734
728 735 errors = self.message.get('IPYTHON_ERROR', [])
729 736 errors.append(text)
730 737
731 738 def var_expand(self, template):
732 739 """ Expand $variables in the current namespace using Itpl.
733 740
734 741 Parameters
735 742 ----------
736 743 template : str
737 744 """
738 745
739 746 return str(ItplNS(template, self.user_ns))
740 747
741 748 def _possible_macro(self, obj):
742 749 """ If the object is a macro, execute it.
743 750 """
744 751
745 752 if isinstance(obj, Macro):
746 753 self.execute_macro(obj)
747 754
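A minimal, self-contained sketch of the splitting strategy that split_commands above relies on, assuming only the Python 2 stdlib compiler module; split_source and its sample input are illustrative names, not part of IPython:

    import compiler

    def split_source(python):
        # compiler.parse chokes on trailing whitespace, so strip it first.
        python = python.strip()
        # Each top-level statement node carries the 1-indexed line where it starts.
        ast = compiler.parse(python)
        linenos = [n.lineno - 1 for n in ast.node if n.lineno is not None]
        linenos.append(None)  # so the last slice runs to the end of the source
        lines = python.splitlines()
        return ['\n'.join(lines[i:j]) + '\n'
                for i, j in zip(linenos[:-1], linenos[1:]) if lines[i:j]]

    # split_source("a=1\nif a:\n    b=2\nprint b")
    # -> ['a=1\n', 'if a:\n    b=2\n', 'print b\n']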
@@ -1,61 +1,70
1 1 # encoding: utf-8
2 2 """
3 3 Test the output capture at the OS level, using file descriptors.
4 4 """
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is
12 12 # in the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15
16 # Stdlib imports
16 17 import os
17 18 from cStringIO import StringIO
18 19
20 # Our own imports
21 from IPython.testing import decorators as dec
19 22
23 #-----------------------------------------------------------------------------
24 # Test functions
25
26 @dec.skip_win32
20 27 def test_redirector():
21 28 """ Checks that the redirector can be used to do synchronous capture.
22 29 """
23 30 from IPython.kernel.core.fd_redirector import FDRedirector
24 31 r = FDRedirector()
25 32 out = StringIO()
26 33 try:
27 34 r.start()
28 35 for i in range(10):
29 36 os.system('echo %ic' % i)
30 37 print >>out, r.getvalue(),
31 38 print >>out, i
32 39 except:
33 40 r.stop()
34 41 raise
35 42 r.stop()
36 assert out.getvalue() == "".join("%ic\n%i\n" %(i, i) for i in range(10))
43 result1 = out.getvalue()
44 result2 = "".join("%ic\n%i\n" %(i, i) for i in range(10))
45 assert result1 == result2
37 46
38 47
48 @dec.skip_win32
39 49 def test_redirector_output_trap():
40 50 def test_redirector_output_trap():
41 51     """ This test checks not only that the redirector_output_trap does
42 52         trap the output, but also that it does so in a greedy way, that
43 53 """
44 54 from IPython.kernel.core.redirector_output_trap import RedirectorOutputTrap
45 55 out = StringIO()
46 56 trap = RedirectorOutputTrap(out.write, out.write)
47 57 try:
48 58 trap.set()
49 59 for i in range(10):
50 60 os.system('echo %ic' % i)
51 61 print "%ip" % i
52 62 print >>out, i
53 63 except:
54 64 trap.unset()
55 65 raise
56 66 trap.unset()
57 assert out.getvalue() == "".join("%ic\n%ip\n%i\n" %(i, i, i)
58 for i in range(10))
59
67 result1 = out.getvalue()
68 result2 = "".join("%ic\n%ip\n%i\n" %(i, i, i) for i in range(10))
69 assert result1 == result2
60 70
61
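A minimal sketch of what "output capture at the OS level, using file descriptors" means in these tests; this uses plain os calls only and is not the FDRedirector API, just the underlying idea:

    import os

    read_fd, write_fd = os.pipe()
    saved_stdout = os.dup(1)       # keep a copy of the real stdout fd
    os.dup2(write_fd, 1)           # point fd 1 at the pipe's write end
    os.system('echo captured')     # output of child processes goes into the pipe too
    os.dup2(saved_stdout, 1)       # restore the real stdout
    os.close(write_fd)
    os.close(saved_stdout)
    print os.read(read_fd, 1024),  # -> captured
    os.close(read_fd)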
@@ -1,87 +1,92
1 1 # encoding: utf-8
2 2
3 3 """A class that manages the engine's connection to the controller."""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 import os
19 19 import cPickle as pickle
20 20
21 from twisted.python import log
21 from twisted.python import log, failure
22 from twisted.internet import defer
22 23
23 24 from IPython.kernel.fcutil import find_furl
24 25 from IPython.kernel.enginefc import IFCEngine
25 26
26 27 #-------------------------------------------------------------------------------
27 28 # The ClientConnector class
28 29 #-------------------------------------------------------------------------------
29 30
30 31 class EngineConnector(object):
31 32 class EngineConnector(object):
    """Manage an engine's connection to a controller.
32 33
33 34 This class takes a foolscap `Tub` and provides a `connect_to_controller`
34 35 method that will use the `Tub` to connect to a controller and register
35 36 the engine with the controller.
36 37 """
37 38
38 39 def __init__(self, tub):
39 40 self.tub = tub
40 41
41 42 def connect_to_controller(self, engine_service, furl_or_file):
42 43 """
43 44 Make a connection to a controller specified by a furl.
44 45
45 46         This method takes an `IEngineBase` instance and a foolscap URL and uses
46 47 the `tub` attribute to make a connection to the controller. The
47 48 foolscap URL contains all the information needed to connect to the
48 49 controller, including the ip and port as well as any encryption and
49 50 authentication information needed for the connection.
50 51
51 52 After getting a reference to the controller, this method calls the
52 53 `register_engine` method of the controller to actually register the
53 54 engine.
54 55
55 56 :Parameters:
56 57 engine_service : IEngineBase
57 58 An instance of an `IEngineBase` implementer
58 59 furl_or_file : str
59 60 A furl or a filename containing a furl
60 61 """
61 62 if not self.tub.running:
62 63 self.tub.startService()
63 64 self.engine_service = engine_service
64 65 self.engine_reference = IFCEngine(self.engine_service)
65 self.furl = find_furl(furl_or_file)
66 try:
67 self.furl = find_furl(furl_or_file)
68 except ValueError:
69 return defer.fail(failure.Failure())
70 # return defer.fail(failure.Failure(ValueError('not a valid furl or furl file: %r' % furl_or_file)))
66 71 d = self.tub.getReference(self.furl)
67 72 d.addCallbacks(self._register, self._log_failure)
68 73 return d
69 74
70 75 def _log_failure(self, reason):
71 log.err('engine registration failed:')
76 log.err('EngineConnector: engine registration failed:')
72 77 log.err(reason)
73 78 return reason
74 79
75 80 def _register(self, rr):
76 81 self.remote_ref = rr
77 82 # Now register myself with the controller
78 83 desired_id = self.engine_service.id
79 84 d = self.remote_ref.callRemote('register_engine', self.engine_reference,
80 85 desired_id, os.getpid(), pickle.dumps(self.engine_service.properties,2))
81 86 return d.addCallbacks(self._reference_sent, self._log_failure)
82 87
83 88 def _reference_sent(self, registration_dict):
84 89 self.engine_service.id = registration_dict['id']
85 90 log.msg("engine registration succeeded, got id: %r" % self.engine_service.id)
86 91 return self.engine_service.id
87 92
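A minimal, self-contained sketch of the Twisted idiom used by connect_to_controller above: a synchronous failure is wrapped in an already-fired deferred so callers always get a Deferred back. The helpers parse_furl and lookup_furl are made-up names for illustration, not IPython API:

    from twisted.internet import defer
    from twisted.python import failure

    def parse_furl(furl_or_file):
        # stand-in for find_furl(); only strings that look like a furl pass
        if not furl_or_file.startswith('pb://'):
            raise ValueError('not a valid furl or furl file: %r' % furl_or_file)
        return furl_or_file

    def lookup_furl(furl_or_file):
        try:
            furl = parse_furl(furl_or_file)
        except ValueError:
            # route the error down the errback chain instead of raising
            return defer.fail(failure.Failure())
        return defer.succeed(furl)

    d = lookup_furl('not-a-furl')
    d.addCallbacks(lambda furl: furl,                 # success path
                   lambda reason: str(reason.value))  # errback receives a Failure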
This diff has been collapsed as it changes many lines (730 lines changed).
@@ -1,324 +1,486
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 """Start an IPython cluster conveniently, either locally or remotely.
4 """Start an IPython cluster = (controller + engines)."""
5 5
6 Basic usage
7 -----------
8
9 For local operation, the simplest mode of usage is:
10
11 %prog -n N
12
13 where N is the number of engines you want started.
14
15 For remote operation, you must call it with a cluster description file:
16
17 %prog -f clusterfile.py
18
19 The cluster file is a normal Python script which gets run via execfile(). You
20 can have arbitrary logic in it, but all that matters is that at the end of the
21 execution, it declares the variables 'controller', 'engines', and optionally
22 'sshx'. See the accompanying examples for details on what these variables must
23 contain.
24
25
26 Notes
27 -----
28
29 WARNING: this code is still UNFINISHED and EXPERIMENTAL! It is incomplete,
30 some listed options are not really implemented, and all of its interfaces are
31 subject to change.
32
33 When operating over SSH for a remote cluster, this program relies on the
34 existence of a particular script called 'sshx'. This script must live in the
35 target systems where you'll be running your controller and engines, and is
36 needed to configure your PATH and PYTHONPATH variables for further execution of
37 python code at the other end of an SSH connection. The script can be as simple
38 as:
39
40 #!/bin/sh
41 . $HOME/.bashrc
42 "$@"
43
44 which is the default one provided by IPython. You can modify this or provide
45 your own. Since it's quite likely that for different clusters you may need
46 this script to configure things differently or that it may live in different
47 locations, its full path can be set in the same file where you define the
48 cluster setup. IPython's order of evaluation for this variable is the
49 following:
50
51 a) Internal default: 'sshx'. This only works if it is in the default system
52 path which SSH sets up in non-interactive mode.
53
54 b) Environment variable: if $IPYTHON_SSHX is defined, this overrides the
55 internal default.
56
57 c) Variable 'sshx' in the cluster configuration file: finally, this will
58 override the previous two values.
59
60 This code is Unix-only, with precious little hope of any of this ever working
61 under Windows, since we need SSH from the ground up, we background processes,
62 etc. Ports of this functionality to Windows are welcome.
63
64
65 Call summary
66 ------------
67
68 %prog [options]
69 """
70
71 __docformat__ = "restructuredtext en"
72
73 #-------------------------------------------------------------------------------
6 #-----------------------------------------------------------------------------
74 7 # Copyright (C) 2008 The IPython Development Team
75 8 #
76 9 # Distributed under the terms of the BSD License. The full license is in
77 10 # the file COPYING, distributed as part of this software.
78 #-------------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
79 12
80 #-------------------------------------------------------------------------------
81 # Stdlib imports
82 #-------------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
15 #-----------------------------------------------------------------------------
83 16
84 17 import os
85 import signal
18 import re
86 19 import sys
87 import time
20 import signal
21 pjoin = os.path.join
88 22
89 from optparse import OptionParser
90 from subprocess import Popen,call
23 from twisted.internet import reactor, defer
24 from twisted.internet.protocol import ProcessProtocol
25 from twisted.python import failure, log
26 from twisted.internet.error import ProcessDone, ProcessTerminated
27 from twisted.internet.utils import getProcessOutput
91 28
92 #---------------------------------------------------------------------------
93 # IPython imports
94 #---------------------------------------------------------------------------
95 from IPython.tools import utils
96 from IPython.config import cutils
29 from IPython.external import argparse
30 from IPython.external import Itpl
31 from IPython.kernel.twistedutil import gatherBoth
32 from IPython.kernel.util import printer
33 from IPython.genutils import get_ipython_dir, num_cpus
97 34
98 #---------------------------------------------------------------------------
99 # Normal code begins
100 #---------------------------------------------------------------------------
35 #-----------------------------------------------------------------------------
36 # General process handling code
37 #-----------------------------------------------------------------------------
101 38
102 def parse_args():
103 """Parse command line and return opts,args."""
39 def find_exe(cmd):
40 try:
41 import win32api
42 except ImportError:
43 raise ImportError('you need to have pywin32 installed for this to work')
44 else:
45         (path, offset) = win32api.SearchPath(os.environ['PATH'],cmd)
46 return path
104 47
105 parser = OptionParser(usage=__doc__)
106 newopt = parser.add_option # shorthand
48 class ProcessStateError(Exception):
49 pass
107 50
108 newopt("--controller-port", type="int", dest="controllerport",
109 help="the TCP port the controller is listening on")
51 class UnknownStatus(Exception):
52 pass
110 53
111 newopt("--controller-ip", type="string", dest="controllerip",
112 help="the TCP ip address of the controller")
54 class LauncherProcessProtocol(ProcessProtocol):
55 """
56 A ProcessProtocol to go with the ProcessLauncher.
57 """
58 def __init__(self, process_launcher):
59 self.process_launcher = process_launcher
60
61 def connectionMade(self):
62 self.process_launcher.fire_start_deferred(self.transport.pid)
63
64 def processEnded(self, status):
65 value = status.value
66 if isinstance(value, ProcessDone):
67 self.process_launcher.fire_stop_deferred(0)
68 elif isinstance(value, ProcessTerminated):
69 self.process_launcher.fire_stop_deferred(
70 {'exit_code':value.exitCode,
71 'signal':value.signal,
72 'status':value.status
73 }
74 )
75 else:
76 raise UnknownStatus("unknown exit status, this is probably a bug in Twisted")
113 77
114 newopt("-n", "--num", type="int", dest="n",default=2,
115 help="the number of engines to start")
78 def outReceived(self, data):
79 log.msg(data)
116 80
117 newopt("--engine-port", type="int", dest="engineport",
118 help="the TCP port the controller will listen on for engine "
119 "connections")
120
121 newopt("--engine-ip", type="string", dest="engineip",
122 help="the TCP ip address the controller will listen on "
123 "for engine connections")
81 def errReceived(self, data):
82 log.err(data)
124 83
125 newopt("--mpi", type="string", dest="mpi",
126 help="use mpi with package: for instance --mpi=mpi4py")
84 class ProcessLauncher(object):
85 """
86 Start and stop an external process in an asynchronous manner.
87
88 Currently this uses deferreds to notify other parties of process state
89 changes. This is an awkward design and should be moved to using
90 a formal NotificationCenter.
91 """
92 def __init__(self, cmd_and_args):
93 self.cmd = cmd_and_args[0]
94 self.args = cmd_and_args
95 self._reset()
96
97 def _reset(self):
98 self.process_protocol = None
99 self.pid = None
100 self.start_deferred = None
101 self.stop_deferreds = []
102 self.state = 'before' # before, running, or after
127 103
128 newopt("-l", "--logfile", type="string", dest="logfile",
129 help="log file name")
104 @property
105 def running(self):
106 if self.state == 'running':
107 return True
108 else:
109 return False
110
111 def fire_start_deferred(self, pid):
112 self.pid = pid
113 self.state = 'running'
114 log.msg('Process %r has started with pid=%i' % (self.args, pid))
115 self.start_deferred.callback(pid)
116
117 def start(self):
118 if self.state == 'before':
119 self.process_protocol = LauncherProcessProtocol(self)
120 self.start_deferred = defer.Deferred()
121 self.process_transport = reactor.spawnProcess(
122 self.process_protocol,
123 self.cmd,
124 self.args,
125 env=os.environ
126 )
127 return self.start_deferred
128 else:
129 s = 'the process has already been started and has state: %r' % \
130 self.state
131 return defer.fail(ProcessStateError(s))
132
133 def get_stop_deferred(self):
134 if self.state == 'running' or self.state == 'before':
135 d = defer.Deferred()
136 self.stop_deferreds.append(d)
137 return d
138 else:
139 s = 'this process is already complete'
140 return defer.fail(ProcessStateError(s))
141
142 def fire_stop_deferred(self, exit_code):
143 log.msg('Process %r has stopped with %r' % (self.args, exit_code))
144 self.state = 'after'
145 for d in self.stop_deferreds:
146 d.callback(exit_code)
147
148 def signal(self, sig):
149 """
150 Send a signal to the process.
151
152 The argument sig can be ('KILL','INT', etc.) or any signal number.
153 """
154 if self.state == 'running':
155 self.process_transport.signalProcess(sig)
156
157 # def __del__(self):
158 # self.signal('KILL')
159
160 def interrupt_then_kill(self, delay=1.0):
161 self.signal('INT')
162 reactor.callLater(delay, self.signal, 'KILL')
130 163
131 newopt('-f','--cluster-file',dest='clusterfile',
132 help='file describing a remote cluster')
133 164
134 return parser.parse_args()
165 #-----------------------------------------------------------------------------
166 # Code for launching controller and engines
167 #-----------------------------------------------------------------------------
135 168
136 def numAlive(controller,engines):
137 """Return the number of processes still alive."""
138 retcodes = [controller.poll()] + \
139 [e.poll() for e in engines]
140 return retcodes.count(None)
141 169
142 stop = lambda pid: os.kill(pid,signal.SIGINT)
143 kill = lambda pid: os.kill(pid,signal.SIGTERM)
170 class ControllerLauncher(ProcessLauncher):
171
172 def __init__(self, extra_args=None):
173 if sys.platform == 'win32':
174 args = [find_exe('ipcontroller.bat')]
175 else:
176 args = ['ipcontroller']
177 self.extra_args = extra_args
178 if extra_args is not None:
179 args.extend(extra_args)
180
181 ProcessLauncher.__init__(self, args)
182
144 183
145 def cleanup(clean,controller,engines):
146 """Stop the controller and engines with the given cleanup method."""
184 class EngineLauncher(ProcessLauncher):
147 185
148 for e in engines:
149 if e.poll() is None:
150 print 'Stopping engine, pid',e.pid
151 clean(e.pid)
152 if controller.poll() is None:
153 print 'Stopping controller, pid',controller.pid
154 clean(controller.pid)
155
156
157 def ensureDir(path):
158 """Ensure a directory exists or raise an exception."""
159 if not os.path.isdir(path):
160 os.makedirs(path)
161
162
163 def startMsg(control_host,control_port=10105):
164 """Print a startup message"""
165 print
166 print 'Your cluster is up and running.'
167 print
168 print 'For interactive use, you can make a MultiEngineClient with:'
169 print
170 print 'from IPython.kernel import client'
171 print "mec = client.MultiEngineClient()"
172 print
173 print 'You can then cleanly stop the cluster from IPython using:'
174 print
175 print 'mec.kill(controller=True)'
176 print
186 def __init__(self, extra_args=None):
187 if sys.platform == 'win32':
188 args = [find_exe('ipengine.bat')]
189 else:
190 args = ['ipengine']
191 self.extra_args = extra_args
192 if extra_args is not None:
193 args.extend(extra_args)
194
195 ProcessLauncher.__init__(self, args)
177 196
197
198 class LocalEngineSet(object):
178 199
179 def clusterLocal(opt,arg):
180 """Start a cluster on the local machine."""
200 def __init__(self, extra_args=None):
201 self.extra_args = extra_args
202 self.launchers = []
181 203
182 # Store all logs inside the ipython directory
183 ipdir = cutils.get_ipython_dir()
184 pjoin = os.path.join
185
186 logfile = opt.logfile
187 if logfile is None:
188 logdir_base = pjoin(ipdir,'log')
189 ensureDir(logdir_base)
190 logfile = pjoin(logdir_base,'ipcluster-')
191
192 print 'Starting controller:',
193 controller = Popen(['ipcontroller','--logfile',logfile,'-x','-y'])
194 print 'Controller PID:',controller.pid
195
196 print 'Starting engines: ',
197 time.sleep(5)
198
199 englogfile = '%s%s-' % (logfile,controller.pid)
200 mpi = opt.mpi
201 if mpi: # start with mpi - killing the engines with sigterm will not work if you do this
202 engines = [Popen(['mpirun', '-np', str(opt.n), 'ipengine', '--mpi',
203 mpi, '--logfile',englogfile])]
204 # engines = [Popen(['mpirun', '-np', str(opt.n), 'ipengine', '--mpi', mpi])]
205 else: # do what we would normally do
206 engines = [ Popen(['ipengine','--logfile',englogfile])
207 for i in range(opt.n) ]
208 eids = [e.pid for e in engines]
209 print 'Engines PIDs: ',eids
210 print 'Log files: %s*' % englogfile
204 def start(self, n):
205 dlist = []
206 for i in range(n):
207 el = EngineLauncher(extra_args=self.extra_args)
208 d = el.start()
209 self.launchers.append(el)
210 dlist.append(d)
211 dfinal = gatherBoth(dlist, consumeErrors=True)
212 dfinal.addCallback(self._handle_start)
213 return dfinal
211 214
212 proc_ids = eids + [controller.pid]
213 procs = engines + [controller]
214
215 grpid = os.getpgrp()
216 try:
217 startMsg('127.0.0.1')
218 print 'You can also hit Ctrl-C to stop it, or use from the cmd line:'
219 print
220 print 'kill -INT',grpid
221 print
222 try:
223 while True:
224 time.sleep(5)
225 except:
226 pass
227 finally:
228 print 'Stopping cluster. Cleaning up...'
229 cleanup(stop,controller,engines)
230 for i in range(4):
231 time.sleep(i+2)
232 nZombies = numAlive(controller,engines)
233 if nZombies== 0:
234 print 'OK: All processes cleaned up.'
235 break
236 print 'Trying again, %d processes did not stop...' % nZombies
237 cleanup(kill,controller,engines)
238 if numAlive(controller,engines) == 0:
239 print 'OK: All processes cleaned up.'
240 break
241 else:
242 print '*'*75
243 print 'ERROR: could not kill some processes, try to do it',
244 print 'manually.'
245 zombies = []
246 if controller.returncode is None:
247 print 'Controller is alive: pid =',controller.pid
248 zombies.append(controller.pid)
249 liveEngines = [ e for e in engines if e.returncode is None ]
250 for e in liveEngines:
251 print 'Engine is alive: pid =',e.pid
252 zombies.append(e.pid)
253 print
254 print 'Zombie summary:',' '.join(map(str,zombies))
255
256 def clusterRemote(opt,arg):
257 """Start a remote cluster over SSH"""
258
259 # Load the remote cluster configuration
260 clConfig = {}
261 execfile(opt.clusterfile,clConfig)
262 contConfig = clConfig['controller']
263 engConfig = clConfig['engines']
264 # Determine where to find sshx:
265 sshx = clConfig.get('sshx',os.environ.get('IPYTHON_SSHX','sshx'))
215 def _handle_start(self, r):
216 log.msg('Engines started with pids: %r' % r)
217 return r
266 218
267 # Store all logs inside the ipython directory
268 ipdir = cutils.get_ipython_dir()
269 pjoin = os.path.join
270
271 logfile = opt.logfile
272 if logfile is None:
273 logdir_base = pjoin(ipdir,'log')
274 ensureDir(logdir_base)
275 logfile = pjoin(logdir_base,'ipcluster')
276
277 # Append this script's PID to the logfile name always
278 logfile = '%s-%s' % (logfile,os.getpid())
219 def _handle_stop(self, r):
220 log.msg('Engines received signal: %r' % r)
221 return r
279 222
280 print 'Starting controller:'
281 # Controller data:
282 xsys = os.system
283
284 contHost = contConfig['host']
285 contLog = '%s-con-%s-' % (logfile,contHost)
286 cmd = "ssh %s '%s' 'ipcontroller --logfile %s' &" % \
287 (contHost,sshx,contLog)
288 #print 'cmd:<%s>' % cmd # dbg
289 xsys(cmd)
290 time.sleep(2)
291
292 print 'Starting engines: '
293 for engineHost,engineData in engConfig.iteritems():
294 if isinstance(engineData,int):
295 numEngines = engineData
223 def signal(self, sig):
224 dlist = []
225 for el in self.launchers:
226 d = el.get_stop_deferred()
227 dlist.append(d)
228 el.signal(sig)
229 dfinal = gatherBoth(dlist, consumeErrors=True)
230 dfinal.addCallback(self._handle_stop)
231 return dfinal
232
233 def interrupt_then_kill(self, delay=1.0):
234 dlist = []
235 for el in self.launchers:
236 d = el.get_stop_deferred()
237 dlist.append(d)
238 el.interrupt_then_kill(delay)
239 dfinal = gatherBoth(dlist, consumeErrors=True)
240 dfinal.addCallback(self._handle_stop)
241 return dfinal
242
243
244 class BatchEngineSet(object):
245
246 # Subclasses must fill these in. See PBSEngineSet
247 submit_command = ''
248 delete_command = ''
249 job_id_regexp = ''
250
251 def __init__(self, template_file, **kwargs):
252 self.template_file = template_file
253 self.context = {}
254 self.context.update(kwargs)
255 self.batch_file = self.template_file+'-run'
256
257 def parse_job_id(self, output):
258 m = re.match(self.job_id_regexp, output)
259 if m is not None:
260 job_id = m.group()
296 261 else:
297 raise NotImplementedError('port configuration not finished for engines')
298
299 print 'Sarting %d engines on %s' % (numEngines,engineHost)
300 engLog = '%s-eng-%s-' % (logfile,engineHost)
301 for i in range(numEngines):
302 cmd = "ssh %s '%s' 'ipengine --controller-ip %s --logfile %s' &" % \
303 (engineHost,sshx,contHost,engLog)
304 #print 'cmd:<%s>' % cmd # dbg
305 xsys(cmd)
306 # Wait after each host a little bit
307 time.sleep(1)
308
309 startMsg(contConfig['host'])
262 raise Exception("job id couldn't be determined: %s" % output)
263 self.job_id = job_id
264 log.msg('Job started with job id: %r' % job_id)
265 return job_id
266
267 def write_batch_script(self, n):
268 self.context['n'] = n
269 template = open(self.template_file, 'r').read()
270 log.msg('Using template for batch script: %s' % self.template_file)
271 script_as_string = Itpl.itplns(template, self.context)
272 log.msg('Writing instantiated batch script: %s' % self.batch_file)
273 f = open(self.batch_file,'w')
274 f.write(script_as_string)
275 f.close()
276
277 def handle_error(self, f):
278 f.printTraceback()
279 f.raiseException()
280
281 def start(self, n):
282 self.write_batch_script(n)
283 d = getProcessOutput(self.submit_command,
284 [self.batch_file],env=os.environ)
285 d.addCallback(self.parse_job_id)
286 d.addErrback(self.handle_error)
287 return d
310 288
311 def main():
312 """Main driver for the two big options: local or remote cluster."""
289 def kill(self):
290 d = getProcessOutput(self.delete_command,
291 [self.job_id],env=os.environ)
292 return d
293
294 class PBSEngineSet(BatchEngineSet):
295
296 submit_command = 'qsub'
297 delete_command = 'qdel'
298 job_id_regexp = '\d+'
313 299
314 opt,arg = parse_args()
300 def __init__(self, template_file, **kwargs):
301 BatchEngineSet.__init__(self, template_file, **kwargs)
302
303
304 #-----------------------------------------------------------------------------
305 # Main functions for the different types of clusters
306 #-----------------------------------------------------------------------------
307
308 # TODO:
309 # The logic in this code should be moved into classes like LocalCluster,
310 # MpirunCluster, PBSCluster, etc. This would remove a lot of the duplication.
311 # The main functions should then just parse the command line arguments, create
312 # the appropriate class and call a 'start' method.
313
314 def main_local(args):
315 cont_args = []
316 cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller'))
317 if args.x:
318 cont_args.append('-x')
319 if args.y:
320 cont_args.append('-y')
321 cl = ControllerLauncher(extra_args=cont_args)
322 dstart = cl.start()
323 def start_engines(cont_pid):
324 engine_args = []
325 engine_args.append('--logfile=%s' % \
326 pjoin(args.logdir,'ipengine%s-' % cont_pid))
327 eset = LocalEngineSet(extra_args=engine_args)
328 def shutdown(signum, frame):
329 log.msg('Stopping local cluster')
330 # We are still playing with the times here, but these seem
331 # to be reliable in allowing everything to exit cleanly.
332 eset.interrupt_then_kill(0.5)
333 cl.interrupt_then_kill(0.5)
334 reactor.callLater(1.0, reactor.stop)
335 signal.signal(signal.SIGINT,shutdown)
336 d = eset.start(args.n)
337 return d
338 def delay_start(cont_pid):
339 # This is needed because the controller doesn't start listening
340 # right when it starts and the controller needs to write
341 # furl files for the engine to pick up
342 reactor.callLater(1.0, start_engines, cont_pid)
343 dstart.addCallback(delay_start)
344 dstart.addErrback(lambda f: f.raiseException())
345
346 def main_mpirun(args):
347 cont_args = []
348 cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller'))
349 if args.x:
350 cont_args.append('-x')
351 if args.y:
352 cont_args.append('-y')
353 cl = ControllerLauncher(extra_args=cont_args)
354 dstart = cl.start()
355 def start_engines(cont_pid):
356 raw_args = ['mpirun']
357 raw_args.extend(['-n',str(args.n)])
358 raw_args.append('ipengine')
359 raw_args.append('-l')
360 raw_args.append(pjoin(args.logdir,'ipengine%s-' % cont_pid))
361 if args.mpi:
362 raw_args.append('--mpi=%s' % args.mpi)
363 eset = ProcessLauncher(raw_args)
364 def shutdown(signum, frame):
365 log.msg('Stopping local cluster')
366 # We are still playing with the times here, but these seem
367 # to be reliable in allowing everything to exit cleanly.
368 eset.interrupt_then_kill(1.0)
369 cl.interrupt_then_kill(1.0)
370 reactor.callLater(2.0, reactor.stop)
371 signal.signal(signal.SIGINT,shutdown)
372 d = eset.start()
373 return d
374 def delay_start(cont_pid):
375 # This is needed because the controller doesn't start listening
376 # right when it starts and the controller needs to write
377 # furl files for the engine to pick up
378 reactor.callLater(1.0, start_engines, cont_pid)
379 dstart.addCallback(delay_start)
380 dstart.addErrback(lambda f: f.raiseException())
381
382 def main_pbs(args):
383 cont_args = []
384 cont_args.append('--logfile=%s' % pjoin(args.logdir,'ipcontroller'))
385 if args.x:
386 cont_args.append('-x')
387 if args.y:
388 cont_args.append('-y')
389 cl = ControllerLauncher(extra_args=cont_args)
390 dstart = cl.start()
391 def start_engines(r):
392 pbs_set = PBSEngineSet(args.pbsscript)
393 def shutdown(signum, frame):
394 log.msg('Stopping pbs cluster')
395 d = pbs_set.kill()
396 d.addBoth(lambda _: cl.interrupt_then_kill(1.0))
397 d.addBoth(lambda _: reactor.callLater(2.0, reactor.stop))
398 signal.signal(signal.SIGINT,shutdown)
399 d = pbs_set.start(args.n)
400 return d
401 dstart.addCallback(start_engines)
402 dstart.addErrback(lambda f: f.raiseException())
403
404
405 def get_args():
406 base_parser = argparse.ArgumentParser(add_help=False)
407 base_parser.add_argument(
408 '-x',
409 action='store_true',
410 dest='x',
411 help='turn off client security'
412 )
413 base_parser.add_argument(
414 '-y',
415 action='store_true',
416 dest='y',
417 help='turn off engine security'
418 )
419 base_parser.add_argument(
420 "--logdir",
421 type=str,
422 dest="logdir",
423 help="directory to put log files (default=$IPYTHONDIR/log)",
424 default=pjoin(get_ipython_dir(),'log')
425 )
426 base_parser.add_argument(
427 "-n",
428 "--num",
429 type=int,
430 dest="n",
431 default=2,
432 help="the number of engines to start"
433 )
434
435 parser = argparse.ArgumentParser(
436 description='IPython cluster startup. This starts a controller and\
437 engines using various approaches. THIS IS A TECHNOLOGY PREVIEW AND\
438 THE API WILL CHANGE SIGNIFICANTLY BEFORE THE FINAL RELEASE.'
439 )
440 subparsers = parser.add_subparsers(
441 help='available cluster types. For help, do "ipcluster TYPE --help"')
442
443 parser_local = subparsers.add_parser(
444 'local',
445 help='run a local cluster',
446 parents=[base_parser]
447 )
448 parser_local.set_defaults(func=main_local)
449
450 parser_mpirun = subparsers.add_parser(
451 'mpirun',
452 help='run a cluster using mpirun',
453 parents=[base_parser]
454 )
455 parser_mpirun.add_argument(
456 "--mpi",
457 type=str,
458 dest="mpi", # Don't put a default here to allow no MPI support
459 help="how to call MPI_Init (default=mpi4py)"
460 )
461 parser_mpirun.set_defaults(func=main_mpirun)
462
463 parser_pbs = subparsers.add_parser(
464 'pbs',
465 help='run a pbs cluster',
466 parents=[base_parser]
467 )
468 parser_pbs.add_argument(
469 '--pbs-script',
470 type=str,
471 dest='pbsscript',
472 help='PBS script template',
473 default='pbs.template'
474 )
475 parser_pbs.set_defaults(func=main_pbs)
476 args = parser.parse_args()
477 return args
315 478
316 clusterfile = opt.clusterfile
317 if clusterfile:
318 clusterRemote(opt,arg)
319 else:
320 clusterLocal(opt,arg)
321
322
323 if __name__=='__main__':
479 def main():
480 args = get_args()
481 reactor.callWhenRunning(args.func, args)
482 log.startLogging(sys.stdout)
483 reactor.run()
484
485 if __name__ == '__main__':
324 486 main()
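A minimal sketch of the subcommand layout that get_args() builds above, assuming the bundled argparse behaves like the standard module; the parser names, help strings, and the lambda are illustrative only:

    import argparse

    base = argparse.ArgumentParser(add_help=False)
    base.add_argument('-n', '--num', type=int, dest='n', default=2,
                      help='the number of engines to start')

    parser = argparse.ArgumentParser(description='cluster startup (sketch)')
    subparsers = parser.add_subparsers(help='available cluster types')

    parser_local = subparsers.add_parser('local', parents=[base],
                                         help='run a local cluster')
    parser_local.set_defaults(func=lambda args: 'would start %d local engines' % args.n)

    args = parser.parse_args(['local', '-n', '4'])
    print args.func(args)   # -> would start 4 local engines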
@@ -1,366 +1,388
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 """The IPython controller."""
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18
19 19 # Python looks for an empty string at the beginning of sys.path to enable
20 20 # importing from the cwd.
21 21 import sys
22 22 sys.path.insert(0, '')
23 23
24 24 import sys, time, os
25 25 from optparse import OptionParser
26 26
27 27 from twisted.application import internet, service
28 28 from twisted.internet import reactor, error, defer
29 29 from twisted.python import log
30 30
31 31 from IPython.kernel.fcutil import Tub, UnauthenticatedTub, have_crypto
32 32
33 33 # from IPython.tools import growl
34 34 # growl.start("IPython1 Controller")
35 35
36 36 from IPython.kernel.error import SecurityError
37 37 from IPython.kernel import controllerservice
38 38 from IPython.kernel.fcutil import check_furl_file_security
39 39
40 40 from IPython.kernel.config import config_manager as kernel_config_manager
41 41 from IPython.config.cutils import import_item
42 42
43 43
44 44 #-------------------------------------------------------------------------------
45 45 # Code
46 46 #-------------------------------------------------------------------------------
47 47
48 48 def make_tub(ip, port, secure, cert_file):
49 49 """
50 50 Create a listening tub given an ip, port, and cert_file location.
51 51
52 52 :Parameters:
53 53 ip : str
54 54             The ip address that the tub should listen on. Empty means all interfaces.
55 55 port : int
56 56 The port that the tub should listen on. A value of 0 means
57 57 pick a random port
58 58 secure: boolean
59 59 Will the connection be secure (in the foolscap sense)
60 60 cert_file:
61 61             A filename of a file to be used for the SSL certificate
62 62 """
63 63 if secure:
64 64 if have_crypto:
65 65 tub = Tub(certFile=cert_file)
66 66 else:
67 raise SecurityError("OpenSSL is not available, so we can't run in secure mode, aborting")
67 raise SecurityError("""
68 OpenSSL/pyOpenSSL is not available, so we can't run in secure mode.
69 Try running without security using 'ipcontroller -xy'.
70 """)
68 71 else:
69 72 tub = UnauthenticatedTub()
70 73
71 74 # Set the strport based on the ip and port and start listening
72 75 if ip == '':
73 76 strport = "tcp:%i" % port
74 77 else:
75 78 strport = "tcp:%i:interface=%s" % (port, ip)
76 79 listener = tub.listenOn(strport)
77 80
78 81 return tub, listener
79 82
80 83 def make_client_service(controller_service, config):
81 84 """
82 85 Create a service that will listen for clients.
83 86
84 87 This service is simply a `foolscap.Tub` instance that has a set of Referenceables
85 88 registered with it.
86 89 """
87 90
88 91 # Now create the foolscap tub
89 92 ip = config['controller']['client_tub']['ip']
90 93 port = config['controller']['client_tub'].as_int('port')
91 94 location = config['controller']['client_tub']['location']
92 95 secure = config['controller']['client_tub']['secure']
93 96 cert_file = config['controller']['client_tub']['cert_file']
94 97 client_tub, client_listener = make_tub(ip, port, secure, cert_file)
95 98
96 99 # Set the location in the trivial case of localhost
97 100 if ip == 'localhost' or ip == '127.0.0.1':
98 101 location = "127.0.0.1"
99 102
100 103 if not secure:
101 104 log.msg("WARNING: you are running the controller with no client security")
102 105
103 106 def set_location_and_register():
104 107 """Set the location for the tub and return a deferred."""
105 108
106 109 def register(empty, ref, furl_file):
107 110 client_tub.registerReference(ref, furlFile=furl_file)
108 111
109 112 if location == '':
110 113 d = client_tub.setLocationAutomatically()
111 114 else:
112 115 d = defer.maybeDeferred(client_tub.setLocation, "%s:%i" % (location, client_listener.getPortnum()))
113 116
114 117 for ciname, ci in config['controller']['controller_interfaces'].iteritems():
115 118 log.msg("Adapting Controller to interface: %s" % ciname)
116 119 furl_file = ci['furl_file']
117 120 log.msg("Saving furl for interface [%s] to file: %s" % (ciname, furl_file))
118 121 check_furl_file_security(furl_file, secure)
119 122 adapted_controller = import_item(ci['controller_interface'])(controller_service)
120 123 d.addCallback(register, import_item(ci['fc_interface'])(adapted_controller),
121 124 furl_file=ci['furl_file'])
122 125
123 126 reactor.callWhenRunning(set_location_and_register)
124 127 return client_tub
125 128
126 129
127 130 def make_engine_service(controller_service, config):
128 131 """
129 132 Create a service that will listen for engines.
130 133
131 134 This service is simply a `foolscap.Tub` instance that has a set of Referenceables
132 135 registered with it.
133 136 """
134 137
135 138 # Now create the foolscap tub
136 139 ip = config['controller']['engine_tub']['ip']
137 140 port = config['controller']['engine_tub'].as_int('port')
138 141 location = config['controller']['engine_tub']['location']
139 142 secure = config['controller']['engine_tub']['secure']
140 143 cert_file = config['controller']['engine_tub']['cert_file']
141 144 engine_tub, engine_listener = make_tub(ip, port, secure, cert_file)
142 145
143 146 # Set the location in the trivial case of localhost
144 147 if ip == 'localhost' or ip == '127.0.0.1':
145 148 location = "127.0.0.1"
146 149
147 150 if not secure:
148 151 log.msg("WARNING: you are running the controller with no engine security")
149 152
150 153 def set_location_and_register():
151 154 """Set the location for the tub and return a deferred."""
152 155
153 156 def register(empty, ref, furl_file):
154 157 engine_tub.registerReference(ref, furlFile=furl_file)
155 158
156 159 if location == '':
157 160 d = engine_tub.setLocationAutomatically()
158 161 else:
159 162 d = defer.maybeDeferred(engine_tub.setLocation, "%s:%i" % (location, engine_listener.getPortnum()))
160 163
161 164 furl_file = config['controller']['engine_furl_file']
162 165 engine_fc_interface = import_item(config['controller']['engine_fc_interface'])
163 166 log.msg("Saving furl for the engine to file: %s" % furl_file)
164 167 check_furl_file_security(furl_file, secure)
165 168 fc_controller = engine_fc_interface(controller_service)
166 169 d.addCallback(register, fc_controller, furl_file=furl_file)
167 170
168 171 reactor.callWhenRunning(set_location_and_register)
169 172 return engine_tub
170 173
171 174 def start_controller():
172 175 """
173 176 Start the controller by creating the service hierarchy and starting the reactor.
174 177
175 178 This method does the following:
176 179
177 180 * It starts the controller logging
178 181     * It executes an import statement for the controller
179 182 * It creates 2 `foolscap.Tub` instances for the client and the engines
180 183 and registers `foolscap.Referenceables` with the tubs to expose the
181 184 controller to engines and clients.
182 185 """
183 186 config = kernel_config_manager.get_config_obj()
184 187
185 188 # Start logging
186 189 logfile = config['controller']['logfile']
187 190 if logfile:
188 191 logfile = logfile + str(os.getpid()) + '.log'
189 192 try:
190 193 openLogFile = open(logfile, 'w')
191 194 except:
192 195 openLogFile = sys.stdout
193 196 else:
194 197 openLogFile = sys.stdout
195 198 log.startLogging(openLogFile)
196 199
197 200 # Execute any user defined import statements
198 201 cis = config['controller']['import_statement']
199 202 if cis:
200 203 try:
201 204 exec cis in globals(), locals()
202 205 except:
203 206 log.msg("Error running import_statement: %s" % cis)
204 207
208     # Delete old furl files unless reuse_furls is set
209 reuse = config['controller']['reuse_furls']
210 if not reuse:
211 paths = (config['controller']['engine_furl_file'],
212 config['controller']['controller_interfaces']['task']['furl_file'],
213 config['controller']['controller_interfaces']['multiengine']['furl_file']
214 )
215 for p in paths:
216 if os.path.isfile(p):
217 os.remove(p)
218
205 219 # Create the service hierarchy
206 220 main_service = service.MultiService()
207 221 # The controller service
208 222 controller_service = controllerservice.ControllerService()
209 223 controller_service.setServiceParent(main_service)
210 224     # The client tub and all its referenceables
211 225 client_service = make_client_service(controller_service, config)
212 226 client_service.setServiceParent(main_service)
213 227 # The engine tub
214 228 engine_service = make_engine_service(controller_service, config)
215 229 engine_service.setServiceParent(main_service)
216 230 # Start the controller service and set things running
217 231 main_service.startService()
218 232 reactor.run()
219 233
220 234 def init_config():
221 235 """
222 236 Initialize the configuration using default and command line options.
223 237 """
224 238
225 239 parser = OptionParser()
226 240
227 241 # Client related options
228 242 parser.add_option(
229 243 "--client-ip",
230 244 type="string",
231 245 dest="client_ip",
232 246 help="the IP address or hostname the controller will listen on for client connections"
233 247 )
234 248 parser.add_option(
235 249 "--client-port",
236 250 type="int",
237 251 dest="client_port",
238 252 help="the port the controller will listen on for client connections"
239 253 )
240 254 parser.add_option(
241 255 '--client-location',
242 256 type="string",
243 257 dest="client_location",
244 258 help="hostname or ip for clients to connect to"
245 259 )
246 260 parser.add_option(
247 261 "-x",
248 262 action="store_false",
249 263 dest="client_secure",
250 264 help="turn off all client security"
251 265 )
252 266 parser.add_option(
253 267 '--client-cert-file',
254 268 type="string",
255 269 dest="client_cert_file",
256 270 help="file to store the client SSL certificate"
257 271 )
258 272 parser.add_option(
259 273 '--task-furl-file',
260 274 type="string",
261 275 dest="task_furl_file",
262 276 help="file to store the FURL for task clients to connect with"
263 277 )
264 278 parser.add_option(
265 279 '--multiengine-furl-file',
266 280 type="string",
267 281 dest="multiengine_furl_file",
268 282 help="file to store the FURL for multiengine clients to connect with"
269 283 )
270 284 # Engine related options
271 285 parser.add_option(
272 286 "--engine-ip",
273 287 type="string",
274 288 dest="engine_ip",
275 289 help="the IP address or hostname the controller will listen on for engine connections"
276 290 )
277 291 parser.add_option(
278 292 "--engine-port",
279 293 type="int",
280 294 dest="engine_port",
281 295 help="the port the controller will listen on for engine connections"
282 296 )
283 297 parser.add_option(
284 298 '--engine-location',
285 299 type="string",
286 300 dest="engine_location",
287 301 help="hostname or ip for engines to connect to"
288 302 )
289 303 parser.add_option(
290 304 "-y",
291 305 action="store_false",
292 306 dest="engine_secure",
293 307 help="turn off all engine security"
294 308 )
295 309 parser.add_option(
296 310 '--engine-cert-file',
297 311 type="string",
298 312 dest="engine_cert_file",
299 313 help="file to store the engine SSL certificate"
300 314 )
301 315 parser.add_option(
302 316 '--engine-furl-file',
303 317 type="string",
304 318 dest="engine_furl_file",
305 319 help="file to store the FURL for engines to connect with"
306 320 )
307 321 parser.add_option(
308 322 "-l", "--logfile",
309 323 type="string",
310 324 dest="logfile",
311 325 help="log file name (default is stdout)"
312 326 )
313 327 parser.add_option(
314 328 "--ipythondir",
315 329 type="string",
316 330 dest="ipythondir",
317 331 help="look for config files and profiles in this directory"
318 332 )
333 parser.add_option(
334 "-r",
335 action="store_true",
336 dest="reuse_furls",
337 help="try to reuse all furl files"
338 )
319 339
320 340 (options, args) = parser.parse_args()
321 341
322 342 kernel_config_manager.update_config_obj_from_default_file(options.ipythondir)
323 343 config = kernel_config_manager.get_config_obj()
324 344
325 345 # Update with command line options
326 346 if options.client_ip is not None:
327 347 config['controller']['client_tub']['ip'] = options.client_ip
328 348 if options.client_port is not None:
329 349 config['controller']['client_tub']['port'] = options.client_port
330 350 if options.client_location is not None:
331 351 config['controller']['client_tub']['location'] = options.client_location
332 352 if options.client_secure is not None:
333 353 config['controller']['client_tub']['secure'] = options.client_secure
334 354 if options.client_cert_file is not None:
335 355 config['controller']['client_tub']['cert_file'] = options.client_cert_file
336 356 if options.task_furl_file is not None:
337 357 config['controller']['controller_interfaces']['task']['furl_file'] = options.task_furl_file
338 358 if options.multiengine_furl_file is not None:
339 359 config['controller']['controller_interfaces']['multiengine']['furl_file'] = options.multiengine_furl_file
340 360 if options.engine_ip is not None:
341 361 config['controller']['engine_tub']['ip'] = options.engine_ip
342 362 if options.engine_port is not None:
343 363 config['controller']['engine_tub']['port'] = options.engine_port
344 364 if options.engine_location is not None:
345 365 config['controller']['engine_tub']['location'] = options.engine_location
346 366 if options.engine_secure is not None:
347 367 config['controller']['engine_tub']['secure'] = options.engine_secure
348 368 if options.engine_cert_file is not None:
349 369 config['controller']['engine_tub']['cert_file'] = options.engine_cert_file
350 370 if options.engine_furl_file is not None:
351 371 config['controller']['engine_furl_file'] = options.engine_furl_file
372 if options.reuse_furls is not None:
373 config['controller']['reuse_furls'] = options.reuse_furls
352 374
353 375 if options.logfile is not None:
354 376 config['controller']['logfile'] = options.logfile
355 377
356 378 kernel_config_manager.update_config_obj(config)
357 379
358 380 def main():
359 381 """
360 382 After creating the configuration information, start the controller.
361 383 """
362 384 init_config()
363 385 start_controller()
364 386
365 387 if __name__ == "__main__":
366 388 main()
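A small sketch of the listen-address rule applied by make_tub() above: an empty ip means "listen on all interfaces", which maps to a bare tcp strport. make_strport is an illustrative name, not part of the module:

    def make_strport(ip, port):
        if ip == '':
            return "tcp:%i" % port
        return "tcp:%i:interface=%s" % (port, ip)

    print make_strport('', 10105)           # -> tcp:10105
    print make_strport('127.0.0.1', 10105)  # -> tcp:10105:interface=127.0.0.1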
@@ -1,171 +1,176
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 """Start the IPython Engine."""
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18
19 19 # Python looks for an empty string at the beginning of sys.path to enable
20 20 # importing from the cwd.
21 21 import sys
22 22 sys.path.insert(0, '')
23 23
24 24 import sys, os
25 25 from optparse import OptionParser
26 26
27 27 from twisted.application import service
28 28 from twisted.internet import reactor
29 29 from twisted.python import log
30 30
31 31 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
32 32
33 33 from IPython.kernel.core.config import config_manager as core_config_manager
34 34 from IPython.config.cutils import import_item
35 35 from IPython.kernel.engineservice import EngineService
36 36 from IPython.kernel.config import config_manager as kernel_config_manager
37 37 from IPython.kernel.engineconnector import EngineConnector
38 38
39 39
40 40 #-------------------------------------------------------------------------------
41 41 # Code
42 42 #-------------------------------------------------------------------------------
43 43
44 44 def start_engine():
45 45 """
46 46 Start the engine, by creating it and starting the Twisted reactor.
47 47
48 48 This method does:
49 49
50 50 * If it exists, runs the `mpi_import_statement` to call `MPI_Init`
51 51 * Starts the engine logging
52 52 * Creates an IPython shell and wraps it in an `EngineService`
53 53 * Creates a `foolscap.Tub` to use in connecting to a controller.
54 54 * Uses the tub and the `EngineService` along with a Foolscap URL
55 55 (or FURL) to connect to the controller and register the engine
56 56 with the controller
57 57 """
58 58 kernel_config = kernel_config_manager.get_config_obj()
59 59 core_config = core_config_manager.get_config_obj()
60 60
61 61
62 62 # Execute the mpi import statement that needs to call MPI_Init
63 63 global mpi
64 64 mpikey = kernel_config['mpi']['default']
65 65 mpi_import_statement = kernel_config['mpi'].get(mpikey, None)
66 66 if mpi_import_statement is not None:
67 67 try:
68 68 exec mpi_import_statement in globals()
69 69 except:
70 70 mpi = None
71 71 else:
72 72 mpi = None
73 73
74 74 # Start logging
75 75 logfile = kernel_config['engine']['logfile']
76 76 if logfile:
77 77 logfile = logfile + str(os.getpid()) + '.log'
78 78 try:
79 79 openLogFile = open(logfile, 'w')
80 80 except:
81 81 openLogFile = sys.stdout
82 82 else:
83 83 openLogFile = sys.stdout
84 84 log.startLogging(openLogFile)
85 85
86 86 # Create the underlying shell class and EngineService
87 87 shell_class = import_item(core_config['shell']['shell_class'])
88 88 engine_service = EngineService(shell_class, mpi=mpi)
89 89 shell_import_statement = core_config['shell']['import_statement']
90 90 if shell_import_statement:
91 91 try:
92 92 engine_service.execute(shell_import_statement)
93 93 except:
94 log.msg("Error running import_statement: %s" % sis)
94 log.msg("Error running import_statement: %s" % shell_import_statement)
95 95
96 96 # Create the service hierarchy
97 97 main_service = service.MultiService()
98 98 engine_service.setServiceParent(main_service)
99 99 tub_service = Tub()
100 100 tub_service.setServiceParent(main_service)
101 101 # This needs to be called before the connection is initiated
102 102 main_service.startService()
103 103
104 104 # This initiates the connection to the controller and calls
105 105 # register_engine to tell the controller we are ready to do work
106 106 engine_connector = EngineConnector(tub_service)
107 107 furl_file = kernel_config['engine']['furl_file']
108 log.msg("Using furl file: %s" % furl_file)
108 109 d = engine_connector.connect_to_controller(engine_service, furl_file)
109 d.addErrback(lambda _: reactor.stop())
110 def handle_error(f):
111 log.err(f)
112 if reactor.running:
113 reactor.stop()
114 d.addErrback(handle_error)
110 115
111 116 reactor.run()
112 117
113 118
114 119 def init_config():
115 120 """
116 121 Initialize the configuration using default and command line options.
117 122 """
118 123
119 124 parser = OptionParser()
120 125
121 126 parser.add_option(
122 127 "--furl-file",
123 128 type="string",
124 129 dest="furl_file",
125 130 help="The filename containing the FURL of the controller"
126 131 )
127 132 parser.add_option(
128 133 "--mpi",
129 134 type="string",
130 135 dest="mpi",
131 136 help="How to enable MPI (mpi4py, pytrilinos, or empty string to disable)"
132 137 )
133 138 parser.add_option(
134 139 "-l",
135 140 "--logfile",
136 141 type="string",
137 142 dest="logfile",
138 143 help="log file name (default is stdout)"
139 144 )
140 145 parser.add_option(
141 146 "--ipythondir",
142 147 type="string",
143 148 dest="ipythondir",
144 149 help="look for config files and profiles in this directory"
145 150 )
146 151
147 152 (options, args) = parser.parse_args()
148 153
149 154 kernel_config_manager.update_config_obj_from_default_file(options.ipythondir)
150 155 core_config_manager.update_config_obj_from_default_file(options.ipythondir)
151 156
152 157 kernel_config = kernel_config_manager.get_config_obj()
153 158 # Now override with command line options
154 159 if options.furl_file is not None:
155 160 kernel_config['engine']['furl_file'] = options.furl_file
156 161 if options.logfile is not None:
157 162 kernel_config['engine']['logfile'] = options.logfile
158 163 if options.mpi is not None:
159 164 kernel_config['mpi']['default'] = options.mpi
160 165
161 166
162 167 def main():
163 168 """
164 169 After creating the configuration information, start the engine.
165 170 """
166 171 init_config()
167 172 start_engine()
168 173
169 174
170 175 if __name__ == "__main__":
171 main() No newline at end of file
176 main()
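
The errback change above (replacing `d.addErrback(lambda _: reactor.stop())` with `handle_error`) logs the failure before shutting down and only stops the reactor if it is actually running. As a rough standalone sketch of that pattern, using only Twisted APIs (the `connect_to_controller` stub here is invented for illustration and is not part of this changeset):

    from twisted.internet import defer, reactor
    from twisted.python import log

    def connect_to_controller():
        # Stand-in for an asynchronous connection attempt: the returned
        # Deferred fails one second after the reactor starts, so the
        # errback path below is exercised.
        d = defer.Deferred()
        reactor.callLater(1.0, d.errback, RuntimeError("could not reach controller"))
        return d

    def handle_error(failure):
        # Log the full failure (including traceback) before shutting down,
        # and only call reactor.stop() if the reactor is running.
        log.err(failure)
        if reactor.running:
            reactor.stop()

    d = connect_to_controller()
    d.addErrback(handle_error)
    reactor.run()
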
@@ -1,373 +1,373
1 1 # encoding: utf-8
2 2
3 3 """Test template for complete engine object"""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 import cPickle as pickle
19 19
20 20 from twisted.internet import defer, reactor
21 21 from twisted.python import failure
22 22 from twisted.application import service
23 23 import zope.interface as zi
24 24
25 25 from IPython.kernel import newserialized
26 26 from IPython.kernel import error
27 27 from IPython.kernel.pickleutil import can, uncan
28 28 import IPython.kernel.engineservice as es
29 29 from IPython.kernel.core.interpreter import Interpreter
30 30 from IPython.testing.parametric import Parametric, parametric
31 31
32 32 #-------------------------------------------------------------------------------
33 33 # Tests
34 34 #-------------------------------------------------------------------------------
35 35
36 36
37 37 # A sequence of valid commands run through execute
38 38 validCommands = ['a=5',
39 39 'b=10',
40 40 'a=5; b=10; c=a+b',
41 41 'import math; 2.0*math.pi',
42 42 """def f():
43 43 result = 0.0
44 44 for i in range(10):
45 45 result += i
46 46 """,
47 47 'if 1<2: a=5',
48 48 """import time
49 49 time.sleep(0.1)""",
50 50 """from math import cos;
51 51 x = 1.0*cos(0.5)""", # Semicolons lead to Discard ast nodes that should be discarded
52 52 """from sets import Set
53 53 s = Set()
54 54 """, # Trailing whitespace should be allowed.
55 55 """import math
56 56 math.cos(1.0)""", # Test a method call with a discarded return value
57 57 """x=1.0234
58 58 a=5; b=10""", # Test an embedded semicolon
59 59 """x=1.0234
60 60 a=5; b=10;""" # Test both an embedded and trailing semicolon
61 61 ]
62 62
63 63 # A sequence of commands that raise various exceptions
64 64 invalidCommands = [('a=1/0',ZeroDivisionError),
65 65 ('print v',NameError),
66 66 ('l=[];l[0]',IndexError),
67 67 ("d={};d['a']",KeyError),
68 68 ("assert 1==0",AssertionError),
69 69 ("import abababsdbfsbaljasdlja",ImportError),
70 70 ("raise Exception()",Exception)]
71 71
72 72 def testf(x):
73 73 return 2.0*x
74 74
75 75 globala = 99
76 76
77 77 def testg(x):
78 78 return globala*x
79 79
80 80 class IEngineCoreTestCase(object):
81 81 """Test an IEngineCore implementer."""
82 82
83 83 def createShell(self):
84 84 return Interpreter()
85 85
86 86 def catchQueueCleared(self, f):
87 87 try:
88 88 f.raiseException()
89 89 except error.QueueCleared:
90 90 pass
91 91
92 92 def testIEngineCoreInterface(self):
93 93 """Does self.engine claim to implement IEngineCore?"""
94 94 self.assert_(es.IEngineCore.providedBy(self.engine))
95 95
96 96 def testIEngineCoreInterfaceMethods(self):
97 97 """Does self.engine have the methods and attributes in IEngineCore."""
98 98 for m in list(es.IEngineCore):
99 99 self.assert_(hasattr(self.engine, m))
100 100
101 101 def testIEngineCoreDeferreds(self):
102 102 d = self.engine.execute('a=5')
103 103 d.addCallback(lambda _: self.engine.pull('a'))
104 104 d.addCallback(lambda _: self.engine.get_result())
105 105 d.addCallback(lambda _: self.engine.keys())
106 106 d.addCallback(lambda _: self.engine.push(dict(a=10)))
107 107 return d
108 108
109 109 def runTestExecute(self, cmd):
110 110 self.shell = Interpreter()
111 111 actual = self.shell.execute(cmd)
112 112 def compare(computed):
113 113 actual['id'] = computed['id']
114 114 self.assertEquals(actual, computed)
115 115 d = self.engine.execute(cmd)
116 116 d.addCallback(compare)
117 117 return d
118 118
119 119 @parametric
120 120 def testExecute(cls):
121 121 return [(cls.runTestExecute, cmd) for cmd in validCommands]
122 122
123 123 def runTestExecuteFailures(self, cmd, exc):
124 124 def compare(f):
125 125 self.assertRaises(exc, f.raiseException)
126 126 d = self.engine.execute(cmd)
127 127 d.addErrback(compare)
128 128 return d
129 129
130 130 @parametric
131 131 def testExecuteFailuresEngineService(cls):
132 132 return [(cls.runTestExecuteFailures, cmd, exc)
133 133 for cmd, exc in invalidCommands]
134 134
135 135 def runTestPushPull(self, o):
136 136 d = self.engine.push(dict(a=o))
137 137 d.addCallback(lambda r: self.engine.pull('a'))
138 138 d.addCallback(lambda r: self.assertEquals(o,r))
139 139 return d
140 140
141 141 @parametric
142 142 def testPushPull(cls):
143 143 objs = [10,"hi there",1.2342354,{"p":(1,2)},None]
144 144 return [(cls.runTestPushPull, o) for o in objs]
145 145
146 146 def testPullNameError(self):
147 147 d = self.engine.push(dict(a=5))
148 148 d.addCallback(lambda _:self.engine.reset())
149 149 d.addCallback(lambda _: self.engine.pull("a"))
150 150 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
151 151 return d
152 152
153 153 def testPushPullFailures(self):
154 154 d = self.engine.pull('a')
155 155 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
156 156 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
157 157 d.addCallback(lambda _: self.engine.pull('l'))
158 158 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
159 159 d.addCallback(lambda _: self.engine.push(dict(l=lambda x: x)))
160 160 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
161 161 return d
162 162
163 163 def testPushPullArray(self):
164 164 try:
165 165 import numpy
166 166 except:
167 167 return
168 168 a = numpy.random.random(1000)
169 169 d = self.engine.push(dict(a=a))
170 170 d.addCallback(lambda _: self.engine.pull('a'))
171 171 d.addCallback(lambda b: b==a)
172 172 d.addCallback(lambda c: c.all())
173 173 return self.assertDeferredEquals(d, True)
174 174
175 175 def testPushFunction(self):
176 176
177 177 d = self.engine.push_function(dict(f=testf))
178 178 d.addCallback(lambda _: self.engine.execute('result = f(10)'))
179 179 d.addCallback(lambda _: self.engine.pull('result'))
180 180 d.addCallback(lambda r: self.assertEquals(r, testf(10)))
181 181 return d
182 182
183 183 def testPullFunction(self):
184 184 d = self.engine.push_function(dict(f=testf, g=testg))
185 185 d.addCallback(lambda _: self.engine.pull_function(('f','g')))
186 186 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
187 187 return d
188 188
189 189 def testPushFunctionGlobal(self):
190 190 """Make sure that pushed functions pick up the user's namespace for globals."""
191 191 d = self.engine.push(dict(globala=globala))
192 192 d.addCallback(lambda _: self.engine.push_function(dict(g=testg)))
193 193 d.addCallback(lambda _: self.engine.execute('result = g(10)'))
194 194 d.addCallback(lambda _: self.engine.pull('result'))
195 195 d.addCallback(lambda r: self.assertEquals(r, testg(10)))
196 196 return d
197 197
198 198 def testGetResultFailure(self):
199 199 d = self.engine.get_result(None)
200 200 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
201 201 d.addCallback(lambda _: self.engine.get_result(10))
202 202 d.addErrback(lambda f: self.assertRaises(IndexError, f.raiseException))
203 203 return d
204 204
205 205 def runTestGetResult(self, cmd):
206 206 self.shell = Interpreter()
207 207 actual = self.shell.execute(cmd)
208 208 def compare(computed):
209 209 actual['id'] = computed['id']
210 210 self.assertEquals(actual, computed)
211 211 d = self.engine.execute(cmd)
212 212 d.addCallback(lambda r: self.engine.get_result(r['number']))
213 213 d.addCallback(compare)
214 214 return d
215 215
216 216 @parametric
217 217 def testGetResult(cls):
218 218 return [(cls.runTestGetResult, cmd) for cmd in validCommands]
219 219
220 220 def testGetResultDefault(self):
221 221 cmd = 'a=5'
222 222 shell = self.createShell()
223 223 shellResult = shell.execute(cmd)
224 224 def popit(dikt, key):
225 225 dikt.pop(key)
226 226 return dikt
227 227 d = self.engine.execute(cmd)
228 228 d.addCallback(lambda _: self.engine.get_result())
229 229 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r,'id')))
230 230 return d
231 231
232 232 def testKeys(self):
233 233 d = self.engine.keys()
234 234 d.addCallback(lambda s: isinstance(s, list))
235 235 d.addCallback(lambda r: self.assertEquals(r, True))
236 236 return d
237 237
238 238 Parametric(IEngineCoreTestCase)
239 239
240 240 class IEngineSerializedTestCase(object):
241 241     """Test an IEngineSerialized implementer."""
242 242
243 243 def testIEngineSerializedInterface(self):
244 244         """Does self.engine claim to implement IEngineSerialized?"""
245 245 self.assert_(es.IEngineSerialized.providedBy(self.engine))
246 246
247 247 def testIEngineSerializedInterfaceMethods(self):
248 """Does self.engine have the methods and attributes in IEngireCore."""
248 """Does self.engine have the methods and attributes in IEngineCore."""
249 249 for m in list(es.IEngineSerialized):
250 250 self.assert_(hasattr(self.engine, m))
251 251
252 252 def testIEngineSerializedDeferreds(self):
253 253 dList = []
254 254 d = self.engine.push_serialized(dict(key=newserialized.serialize(12345)))
255 255 self.assert_(isinstance(d, defer.Deferred))
256 256 dList.append(d)
257 257 d = self.engine.pull_serialized('key')
258 258 self.assert_(isinstance(d, defer.Deferred))
259 259 dList.append(d)
260 260 D = defer.DeferredList(dList)
261 261 return D
262 262
263 263 def testPushPullSerialized(self):
264 264 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
265 265 d = defer.succeed(None)
266 266 for o in objs:
267 267 self.engine.push_serialized(dict(key=newserialized.serialize(o)))
268 268 value = self.engine.pull_serialized('key')
269 269 value.addCallback(lambda serial: newserialized.IUnSerialized(serial).getObject())
270 270 d = self.assertDeferredEquals(value,o,d)
271 271 return d
272 272
273 273 def testPullSerializedFailures(self):
274 274 d = self.engine.pull_serialized('a')
275 275 d.addErrback(lambda f: self.assertRaises(NameError, f.raiseException))
276 276 d.addCallback(lambda _: self.engine.execute('l = lambda x: x'))
277 277 d.addCallback(lambda _: self.engine.pull_serialized('l'))
278 278 d.addErrback(lambda f: self.assertRaises(pickle.PicklingError, f.raiseException))
279 279 return d
280 280
281 281 Parametric(IEngineSerializedTestCase)
282 282
283 283 class IEngineQueuedTestCase(object):
284 284 """Test an IEngineQueued implementer."""
285 285
286 286 def testIEngineQueuedInterface(self):
287 287 """Does self.engine claim to implement IEngineQueued?"""
288 288 self.assert_(es.IEngineQueued.providedBy(self.engine))
289 289
290 290 def testIEngineQueuedInterfaceMethods(self):
291 """Does self.engine have the methods and attributes in IEngireQueued."""
291 """Does self.engine have the methods and attributes in IEngineQueued."""
292 292 for m in list(es.IEngineQueued):
293 293 self.assert_(hasattr(self.engine, m))
294 294
295 295 def testIEngineQueuedDeferreds(self):
296 296 dList = []
297 297 d = self.engine.clear_queue()
298 298 self.assert_(isinstance(d, defer.Deferred))
299 299 dList.append(d)
300 300 d = self.engine.queue_status()
301 301 self.assert_(isinstance(d, defer.Deferred))
302 302 dList.append(d)
303 303 D = defer.DeferredList(dList)
304 304 return D
305 305
306 306 def testClearQueue(self):
307 307 result = self.engine.clear_queue()
308 308 d1 = self.assertDeferredEquals(result, None)
309 309 d1.addCallback(lambda _: self.engine.queue_status())
310 310 d2 = self.assertDeferredEquals(d1, {'queue':[], 'pending':'None'})
311 311 return d2
312 312
313 313 def testQueueStatus(self):
314 314 result = self.engine.queue_status()
315 315 result.addCallback(lambda r: 'queue' in r and 'pending' in r)
316 316 d = self.assertDeferredEquals(result, True)
317 317 return d
318 318
319 319 Parametric(IEngineQueuedTestCase)
320 320
321 321 class IEnginePropertiesTestCase(object):
322 322 """Test an IEngineProperties implementor."""
323 323
324 324 def testIEnginePropertiesInterface(self):
325 325 """Does self.engine claim to implement IEngineProperties?"""
326 326 self.assert_(es.IEngineProperties.providedBy(self.engine))
327 327
328 328 def testIEnginePropertiesInterfaceMethods(self):
329 """Does self.engine have the methods and attributes in IEngireProperties."""
329 """Does self.engine have the methods and attributes in IEngineProperties."""
330 330 for m in list(es.IEngineProperties):
331 331 self.assert_(hasattr(self.engine, m))
332 332
333 333 def testGetSetProperties(self):
334 334 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
335 335 d = self.engine.set_properties(dikt)
336 336 d.addCallback(lambda r: self.engine.get_properties())
337 337 d = self.assertDeferredEquals(d, dikt)
338 338 d.addCallback(lambda r: self.engine.get_properties(('c',)))
339 339 d = self.assertDeferredEquals(d, {'c': dikt['c']})
340 340 d.addCallback(lambda r: self.engine.set_properties(dict(c=False)))
341 341 d.addCallback(lambda r: self.engine.get_properties(('c', 'd')))
342 342 d = self.assertDeferredEquals(d, dict(c=False, d=None))
343 343 return d
344 344
345 345 def testClearProperties(self):
346 346 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
347 347 d = self.engine.set_properties(dikt)
348 348 d.addCallback(lambda r: self.engine.clear_properties())
349 349 d.addCallback(lambda r: self.engine.get_properties())
350 350 d = self.assertDeferredEquals(d, {})
351 351 return d
352 352
353 353 def testDelHasProperties(self):
354 354 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
355 355 d = self.engine.set_properties(dikt)
356 356 d.addCallback(lambda r: self.engine.del_properties(('b','e')))
357 357 d.addCallback(lambda r: self.engine.has_properties(('a','b','c','d','e')))
358 358 d = self.assertDeferredEquals(d, [True, False, True, True, False])
359 359 return d
360 360
361 361 def testStrictDict(self):
362 362 s = """from IPython.kernel.engineservice import get_engine
363 363 p = get_engine(%s).properties"""%self.engine.id
364 364 d = self.engine.execute(s)
365 365 d.addCallback(lambda r: self.engine.execute("p['a'] = lambda _:None"))
366 366 d = self.assertDeferredRaises(d, error.InvalidProperty)
367 367 d.addCallback(lambda r: self.engine.execute("p['a'] = range(5)"))
368 368 d.addCallback(lambda r: self.engine.execute("p['a'].append(5)"))
369 369 d.addCallback(lambda r: self.engine.get_properties('a'))
370 370 d = self.assertDeferredEquals(d, dict(a=range(5)))
371 371 return d
372 372
373 373 Parametric(IEnginePropertiesTestCase)
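
All of the @parametric tests in this file follow one shape: a runTestX helper does the work for a single input, and the decorated classmethod returns one (helper, args...) tuple per case, which Parametric() then expands into individual test methods. A rough sketch mirroring that usage (the square example is invented; in the real suite these mixins are combined with a Twisted trial TestCase):

    from IPython.testing.parametric import Parametric, parametric

    class SquareTestCase(object):
        """Each tuple returned by the @parametric method becomes one test."""

        def runTestSquare(self, x, expected):
            # Per-case worker; a plain assert keeps the sketch independent
            # of any particular TestCase base class.
            assert x*x == expected

        @parametric
        def testSquare(cls):
            # One (method, *args) tuple per case.
            return [(cls.runTestSquare, x, x*x) for x in range(5)]

    Parametric(SquareTestCase)
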
@@ -1,828 +1,827
1 1 # encoding: utf-8
2 2
3 3 """"""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 from twisted.internet import defer
19 19
20 20 from IPython.kernel import engineservice as es
21 21 from IPython.kernel import multiengine as me
22 22 from IPython.kernel import newserialized
23 from IPython.kernel.error import NotDefined
24 23 from IPython.testing import util
25 24 from IPython.testing.parametric import parametric, Parametric
26 25 from IPython.kernel import newserialized
27 26 from IPython.kernel.util import printer
28 27 from IPython.kernel.error import (InvalidEngineID,
29 28 NoEnginesRegistered,
30 29 CompositeError,
31 30 InvalidDeferredID)
32 31 from IPython.kernel.tests.engineservicetest import validCommands, invalidCommands
33 32 from IPython.kernel.core.interpreter import Interpreter
34 33
35 34
36 35 #-------------------------------------------------------------------------------
37 36 # Base classes and utilities
38 37 #-------------------------------------------------------------------------------
39 38
40 39 class IMultiEngineBaseTestCase(object):
41 40 """Basic utilities for working with multiengine tests.
42 41
43 42     Subclasses should define:
44 43
45 44 * self.multiengine
46 45     * self.engines to keep track of engines for cleanup"""
47 46
48 47 def createShell(self):
49 48 return Interpreter()
50 49
51 50 def addEngine(self, n=1):
52 51 for i in range(n):
53 52 e = es.EngineService()
54 53 e.startService()
55 54 regDict = self.controller.register_engine(es.QueuedEngine(e), None)
56 55 e.id = regDict['id']
57 56 self.engines.append(e)
58 57
59 58
60 59 def testf(x):
61 60 return 2.0*x
62 61
63 62
64 63 globala = 99
65 64
66 65
67 66 def testg(x):
68 67 return globala*x
69 68
70 69
71 70 def isdid(did):
72 71 if not isinstance(did, str):
73 72 return False
74 73 if not len(did)==40:
75 74 return False
76 75 return True
77 76
78 77
79 78 def _raise_it(f):
80 79 try:
81 80 f.raiseException()
82 81 except CompositeError, e:
83 82 e.raise_exception()
84 83
85 84 #-------------------------------------------------------------------------------
86 85 # IMultiEngineTestCase
87 86 #-------------------------------------------------------------------------------
88 87
89 88 class IMultiEngineTestCase(IMultiEngineBaseTestCase):
90 89 """A test for any object that implements IEngineMultiplexer.
91 90
92 91 self.multiengine must be defined and implement IEngineMultiplexer.
93 92 """
94 93
95 94 def testIMultiEngineInterface(self):
96 95         """Does self.multiengine claim to implement IEngineMultiplexer and IMultiEngine?"""
97 96 self.assert_(me.IEngineMultiplexer.providedBy(self.multiengine))
98 97 self.assert_(me.IMultiEngine.providedBy(self.multiengine))
99 98
100 99 def testIEngineMultiplexerInterfaceMethods(self):
101 100         """Does self.multiengine have the methods and attributes in IEngineMultiplexer."""
102 101 for m in list(me.IEngineMultiplexer):
103 102 self.assert_(hasattr(self.multiengine, m))
104 103
105 104 def testIEngineMultiplexerDeferreds(self):
106 105 self.addEngine(1)
107 106 d= self.multiengine.execute('a=5', targets=0)
108 107 d.addCallback(lambda _: self.multiengine.push(dict(a=5),targets=0))
109 108 d.addCallback(lambda _: self.multiengine.push(dict(a=5, b='asdf', c=[1,2,3]),targets=0))
110 109 d.addCallback(lambda _: self.multiengine.pull(('a','b','c'),targets=0))
111 110 d.addCallback(lambda _: self.multiengine.get_result(targets=0))
112 111 d.addCallback(lambda _: self.multiengine.reset(targets=0))
113 112 d.addCallback(lambda _: self.multiengine.keys(targets=0))
114 113 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)),targets=0))
115 114 d.addCallback(lambda _: self.multiengine.pull_serialized('a',targets=0))
116 115 d.addCallback(lambda _: self.multiengine.clear_queue(targets=0))
117 116 d.addCallback(lambda _: self.multiengine.queue_status(targets=0))
118 117 return d
119 118
120 119 def testInvalidEngineID(self):
121 120 self.addEngine(1)
122 121 badID = 100
123 122 d = self.multiengine.execute('a=5', targets=badID)
124 123 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
125 124 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
126 125 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
127 126 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
128 127 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
129 128 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
130 129 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
131 130 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
132 131 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
133 132 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
134 133 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
135 134 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
136 135 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
137 136 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
138 137 d.addErrback(lambda f: self.assertRaises(InvalidEngineID, f.raiseException))
139 138 return d
140 139
141 140 def testNoEnginesRegistered(self):
142 141 badID = 'all'
143 142 d= self.multiengine.execute('a=5', targets=badID)
144 143 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
145 144 d.addCallback(lambda _: self.multiengine.push(dict(a=5), targets=badID))
146 145 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
147 146 d.addCallback(lambda _: self.multiengine.pull('a', targets=badID))
148 147 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
149 148 d.addCallback(lambda _: self.multiengine.get_result(targets=badID))
150 149 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
151 150 d.addCallback(lambda _: self.multiengine.reset(targets=badID))
152 151 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
153 152 d.addCallback(lambda _: self.multiengine.keys(targets=badID))
154 153 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
155 154 d.addCallback(lambda _: self.multiengine.push_serialized(dict(a=newserialized.serialize(10)), targets=badID))
156 155 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
157 156 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=badID))
158 157 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
159 158 d.addCallback(lambda _: self.multiengine.queue_status(targets=badID))
160 159 d.addErrback(lambda f: self.assertRaises(NoEnginesRegistered, f.raiseException))
161 160 return d
162 161
163 162 def runExecuteAll(self, d, cmd, shell):
164 163 actual = shell.execute(cmd)
165 164 d.addCallback(lambda _: self.multiengine.execute(cmd))
166 165 def compare(result):
167 166 for r in result:
168 167 actual['id'] = r['id']
169 168 self.assertEquals(r, actual)
170 169 d.addCallback(compare)
171 170
172 171 def testExecuteAll(self):
173 172 self.addEngine(4)
174 173 d= defer.Deferred()
175 174 shell = Interpreter()
176 175 for cmd in validCommands:
177 176 self.runExecuteAll(d, cmd, shell)
178 177 d.callback(None)
179 178 return d
180 179
181 180 # The following two methods show how to do parametrized
182 181     # tests. This is really slick! The same pattern is used above.
183 182 def runExecuteFailures(self, cmd, exc):
184 183 self.addEngine(4)
185 184 d= self.multiengine.execute(cmd)
186 185 d.addErrback(lambda f: self.assertRaises(exc, _raise_it, f))
187 186 return d
188 187
189 188 @parametric
190 189 def testExecuteFailuresMultiEng(cls):
191 190 return [(cls.runExecuteFailures,cmd,exc) for
192 191 cmd,exc in invalidCommands]
193 192
194 193 def testPushPull(self):
195 194 self.addEngine(1)
196 195 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
197 196 d= self.multiengine.push(dict(key=objs[0]), targets=0)
198 197 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
199 198 d.addCallback(lambda r: self.assertEquals(r, [objs[0]]))
200 199 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[1]), targets=0))
201 200 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
202 201 d.addCallback(lambda r: self.assertEquals(r, [objs[1]]))
203 202 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[2]), targets=0))
204 203 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
205 204 d.addCallback(lambda r: self.assertEquals(r, [objs[2]]))
206 205 d.addCallback(lambda _: self.multiengine.push(dict(key=objs[3]), targets=0))
207 206 d.addCallback(lambda _: self.multiengine.pull('key', targets=0))
208 207 d.addCallback(lambda r: self.assertEquals(r, [objs[3]]))
209 208 d.addCallback(lambda _: self.multiengine.reset(targets=0))
210 209 d.addCallback(lambda _: self.multiengine.pull('a', targets=0))
211 210 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
212 211 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=20)))
213 212 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
214 213 d.addCallback(lambda r: self.assertEquals(r, [[10,20]]))
215 214 return d
216 215
217 216 def testPushPullAll(self):
218 217 self.addEngine(4)
219 218 d= self.multiengine.push(dict(a=10))
220 219 d.addCallback(lambda _: self.multiengine.pull('a'))
221 220 d.addCallback(lambda r: self.assert_(r==[10,10,10,10]))
222 221 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20)))
223 222 d.addCallback(lambda _: self.multiengine.pull(('a','b')))
224 223 d.addCallback(lambda r: self.assert_(r==4*[[10,20]]))
225 224 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20), targets=0))
226 225 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
227 226 d.addCallback(lambda r: self.assert_(r==[[10,20]]))
228 227 d.addCallback(lambda _: self.multiengine.push(dict(a=None, b=None), targets=0))
229 228 d.addCallback(lambda _: self.multiengine.pull(('a','b'), targets=0))
230 229 d.addCallback(lambda r: self.assert_(r==[[None,None]]))
231 230 return d
232 231
233 232 def testPushPullSerialized(self):
234 233 self.addEngine(1)
235 234 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
236 235 d= self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[0])), targets=0)
237 236 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
238 237 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
239 238 d.addCallback(lambda r: self.assertEquals(r, objs[0]))
240 239 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[1])), targets=0))
241 240 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
242 241 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
243 242 d.addCallback(lambda r: self.assertEquals(r, objs[1]))
244 243 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[2])), targets=0))
245 244 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
246 245 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
247 246 d.addCallback(lambda r: self.assertEquals(r, objs[2]))
248 247 d.addCallback(lambda _: self.multiengine.push_serialized(dict(key=newserialized.serialize(objs[3])), targets=0))
249 248 d.addCallback(lambda _: self.multiengine.pull_serialized('key', targets=0))
250 249 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
251 250 d.addCallback(lambda r: self.assertEquals(r, objs[3]))
252 251 d.addCallback(lambda _: self.multiengine.push(dict(a=10,b=range(5)), targets=0))
253 252 d.addCallback(lambda _: self.multiengine.pull_serialized(('a','b'), targets=0))
254 253 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
255 254 d.addCallback(lambda r: self.assertEquals(r, [10, range(5)]))
256 255 d.addCallback(lambda _: self.multiengine.reset(targets=0))
257 256 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
258 257 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
259 258 return d
260 259
261 260 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
262 261 d= defer.succeed(None)
263 262 for o in objs:
264 263 self.multiengine.push_serialized(0, key=newserialized.serialize(o))
265 264 value = self.multiengine.pull_serialized(0, 'key')
266 265 value.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
267 266 d = self.assertDeferredEquals(value,o,d)
268 267 return d
269 268
270 269 def runGetResultAll(self, d, cmd, shell):
271 270 actual = shell.execute(cmd)
272 271 d.addCallback(lambda _: self.multiengine.execute(cmd))
273 272 d.addCallback(lambda _: self.multiengine.get_result())
274 273 def compare(result):
275 274 for r in result:
276 275 actual['id'] = r['id']
277 276 self.assertEquals(r, actual)
278 277 d.addCallback(compare)
279 278
280 279 def testGetResultAll(self):
281 280 self.addEngine(4)
282 281 d= defer.Deferred()
283 282 shell = Interpreter()
284 283 for cmd in validCommands:
285 284 self.runGetResultAll(d, cmd, shell)
286 285 d.callback(None)
287 286 return d
288 287
289 288 def testGetResultDefault(self):
290 289 self.addEngine(1)
291 290 target = 0
292 291 cmd = 'a=5'
293 292 shell = self.createShell()
294 293 shellResult = shell.execute(cmd)
295 294 def popit(dikt, key):
296 295 dikt.pop(key)
297 296 return dikt
298 297 d= self.multiengine.execute(cmd, targets=target)
299 298 d.addCallback(lambda _: self.multiengine.get_result(targets=target))
300 299 d.addCallback(lambda r: self.assertEquals(shellResult, popit(r[0],'id')))
301 300 return d
302 301
303 302 def testGetResultFailure(self):
304 303 self.addEngine(1)
305 304 d= self.multiengine.get_result(None, targets=0)
306 305 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
307 306 d.addCallback(lambda _: self.multiengine.get_result(10, targets=0))
308 307 d.addErrback(lambda f: self.assertRaises(IndexError, _raise_it, f))
309 308 return d
310 309
311 310 def testPushFunction(self):
312 311 self.addEngine(1)
313 312 d= self.multiengine.push_function(dict(f=testf), targets=0)
314 313 d.addCallback(lambda _: self.multiengine.execute('result = f(10)', targets=0))
315 314 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
316 315 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
317 316 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala), targets=0))
318 317 d.addCallback(lambda _: self.multiengine.push_function(dict(g=testg), targets=0))
319 318 d.addCallback(lambda _: self.multiengine.execute('result = g(10)', targets=0))
320 319 d.addCallback(lambda _: self.multiengine.pull('result', targets=0))
321 320 d.addCallback(lambda r: self.assertEquals(r[0], testg(10)))
322 321 return d
323 322
324 323 def testPullFunction(self):
325 324 self.addEngine(1)
326 325 d= self.multiengine.push(dict(a=globala), targets=0)
327 326 d.addCallback(lambda _: self.multiengine.push_function(dict(f=testf), targets=0))
328 327 d.addCallback(lambda _: self.multiengine.pull_function('f', targets=0))
329 328 d.addCallback(lambda r: self.assertEquals(r[0](10), testf(10)))
330 329 d.addCallback(lambda _: self.multiengine.execute("def g(x): return x*x", targets=0))
331 330 d.addCallback(lambda _: self.multiengine.pull_function(('f','g'),targets=0))
332 331 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
333 332 return d
334 333
335 334 def testPushFunctionAll(self):
336 335 self.addEngine(4)
337 336 d= self.multiengine.push_function(dict(f=testf))
338 337 d.addCallback(lambda _: self.multiengine.execute('result = f(10)'))
339 338 d.addCallback(lambda _: self.multiengine.pull('result'))
340 339 d.addCallback(lambda r: self.assertEquals(r, 4*[testf(10)]))
341 340 d.addCallback(lambda _: self.multiengine.push(dict(globala=globala)))
342 341 d.addCallback(lambda _: self.multiengine.push_function(dict(testg=testg)))
343 342 d.addCallback(lambda _: self.multiengine.execute('result = testg(10)'))
344 343 d.addCallback(lambda _: self.multiengine.pull('result'))
345 344 d.addCallback(lambda r: self.assertEquals(r, 4*[testg(10)]))
346 345 return d
347 346
348 347 def testPullFunctionAll(self):
349 348 self.addEngine(4)
350 349 d= self.multiengine.push_function(dict(f=testf))
351 350 d.addCallback(lambda _: self.multiengine.pull_function('f'))
352 351 d.addCallback(lambda r: self.assertEquals([func(10) for func in r], 4*[testf(10)]))
353 352 return d
354 353
355 354 def testGetIDs(self):
356 355 self.addEngine(1)
357 356 d= self.multiengine.get_ids()
358 357 d.addCallback(lambda r: self.assertEquals(r, [0]))
359 358 d.addCallback(lambda _: self.addEngine(3))
360 359 d.addCallback(lambda _: self.multiengine.get_ids())
361 360 d.addCallback(lambda r: self.assertEquals(r, [0,1,2,3]))
362 361 return d
363 362
364 363 def testClearQueue(self):
365 364 self.addEngine(4)
366 365 d= self.multiengine.clear_queue()
367 366 d.addCallback(lambda r: self.assertEquals(r,4*[None]))
368 367 return d
369 368
370 369 def testQueueStatus(self):
371 370 self.addEngine(4)
372 371 d= self.multiengine.queue_status(targets=0)
373 372 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
374 373 return d
375 374
376 375 def testGetSetProperties(self):
377 376 self.addEngine(4)
378 377 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
379 378 d= self.multiengine.set_properties(dikt)
380 379 d.addCallback(lambda r: self.multiengine.get_properties())
381 380 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
382 381 d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
383 382 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
384 383 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
385 384 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
386 385 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
387 386 return d
388 387
389 388 def testClearProperties(self):
390 389 self.addEngine(4)
391 390 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
392 391 d= self.multiengine.set_properties(dikt)
393 392 d.addCallback(lambda r: self.multiengine.clear_properties())
394 393 d.addCallback(lambda r: self.multiengine.get_properties())
395 394 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
396 395 return d
397 396
398 397 def testDelHasProperties(self):
399 398 self.addEngine(4)
400 399 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
401 400 d= self.multiengine.set_properties(dikt)
402 401 d.addCallback(lambda r: self.multiengine.del_properties(('b','e')))
403 402 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e')))
404 403 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
405 404 return d
406 405
407 406 Parametric(IMultiEngineTestCase)
408 407
409 408 #-------------------------------------------------------------------------------
410 409 # ISynchronousMultiEngineTestCase
411 410 #-------------------------------------------------------------------------------
412 411
413 412 class ISynchronousMultiEngineTestCase(IMultiEngineBaseTestCase):
414 413
415 414 def testISynchronousMultiEngineInterface(self):
416 415         """Does self.multiengine claim to implement ISynchronousMultiEngine?"""
417 416 self.assert_(me.ISynchronousEngineMultiplexer.providedBy(self.multiengine))
418 417 self.assert_(me.ISynchronousMultiEngine.providedBy(self.multiengine))
419 418
420 419 def testExecute(self):
421 420 self.addEngine(4)
422 421 execute = self.multiengine.execute
423 422 d= execute('a=5', targets=0, block=True)
424 423 d.addCallback(lambda r: self.assert_(len(r)==1))
425 424 d.addCallback(lambda _: execute('b=10'))
426 425 d.addCallback(lambda r: self.assert_(len(r)==4))
427 426 d.addCallback(lambda _: execute('c=30', block=False))
428 427 d.addCallback(lambda did: self.assert_(isdid(did)))
429 428 d.addCallback(lambda _: execute('d=[0,1,2]', block=False))
430 429 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
431 430 d.addCallback(lambda r: self.assert_(len(r)==4))
432 431 return d
433 432
434 433 def testPushPull(self):
435 434 data = dict(a=10, b=1.05, c=range(10), d={'e':(1,2),'f':'hi'})
436 435 self.addEngine(4)
437 436 push = self.multiengine.push
438 437 pull = self.multiengine.pull
439 438 d= push({'data':data}, targets=0)
440 439 d.addCallback(lambda r: pull('data', targets=0))
441 440 d.addCallback(lambda r: self.assertEqual(r,[data]))
442 441 d.addCallback(lambda _: push({'data':data}))
443 442 d.addCallback(lambda r: pull('data'))
444 443 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
445 444 d.addCallback(lambda _: push({'data':data}, block=False))
446 445 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
447 446 d.addCallback(lambda _: pull('data', block=False))
448 447 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
449 448 d.addCallback(lambda r: self.assertEqual(r,4*[data]))
450 449 d.addCallback(lambda _: push(dict(a=10,b=20)))
451 450 d.addCallback(lambda _: pull(('a','b')))
452 451 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,20]]))
453 452 return d
454 453
455 454 def testPushPullFunction(self):
456 455 self.addEngine(4)
457 456 pushf = self.multiengine.push_function
458 457 pullf = self.multiengine.pull_function
459 458 push = self.multiengine.push
460 459 pull = self.multiengine.pull
461 460 execute = self.multiengine.execute
462 461 d= pushf({'testf':testf}, targets=0)
463 462 d.addCallback(lambda r: pullf('testf', targets=0))
464 463 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
465 464 d.addCallback(lambda _: execute('r = testf(10)', targets=0))
466 465 d.addCallback(lambda _: pull('r', targets=0))
467 466 d.addCallback(lambda r: self.assertEquals(r[0], testf(10)))
468 467 d.addCallback(lambda _: pushf({'testf':testf}, block=False))
469 468 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
470 469 d.addCallback(lambda _: pullf('testf', block=False))
471 470 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
472 471 d.addCallback(lambda r: self.assertEqual(r[0](1.0), testf(1.0)))
473 472 d.addCallback(lambda _: execute("def g(x): return x*x", targets=0))
474 473 d.addCallback(lambda _: pullf(('testf','g'),targets=0))
475 474 d.addCallback(lambda r: self.assertEquals((r[0][0](10),r[0][1](10)), (testf(10), 100)))
476 475 return d
477 476
478 477 def testGetResult(self):
479 478 shell = Interpreter()
480 479 result1 = shell.execute('a=10')
481 480 result1['id'] = 0
482 481 result2 = shell.execute('b=20')
483 482 result2['id'] = 0
484 483 execute= self.multiengine.execute
485 484 get_result = self.multiengine.get_result
486 485 self.addEngine(1)
487 486 d= execute('a=10')
488 487 d.addCallback(lambda _: get_result())
489 488 d.addCallback(lambda r: self.assertEquals(r[0], result1))
490 489 d.addCallback(lambda _: execute('b=20'))
491 490 d.addCallback(lambda _: get_result(1))
492 491 d.addCallback(lambda r: self.assertEquals(r[0], result1))
493 492 d.addCallback(lambda _: get_result(2, block=False))
494 493 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
495 494 d.addCallback(lambda r: self.assertEquals(r[0], result2))
496 495 return d
497 496
498 497 def testResetAndKeys(self):
499 498 self.addEngine(1)
500 499
501 500 #Blocking mode
502 501 d= self.multiengine.push(dict(a=10, b=20, c=range(10)), targets=0)
503 502 d.addCallback(lambda _: self.multiengine.keys(targets=0))
504 503 def keys_found(keys):
505 504 self.assert_('a' in keys[0])
506 505 self.assert_('b' in keys[0])
507 506 self.assert_('b' in keys[0])
508 507 d.addCallback(keys_found)
509 508 d.addCallback(lambda _: self.multiengine.reset(targets=0))
510 509 d.addCallback(lambda _: self.multiengine.keys(targets=0))
511 510 def keys_not_found(keys):
512 511 self.assert_('a' not in keys[0])
513 512 self.assert_('b' not in keys[0])
514 513 self.assert_('b' not in keys[0])
515 514 d.addCallback(keys_not_found)
516 515
517 516 #Non-blocking mode
518 517 d.addCallback(lambda _: self.multiengine.push(dict(a=10, b=20, c=range(10)), targets=0))
519 518 d.addCallback(lambda _: self.multiengine.keys(targets=0, block=False))
520 519 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
521 520 def keys_found(keys):
522 521 self.assert_('a' in keys[0])
523 522 self.assert_('b' in keys[0])
524 523 self.assert_('b' in keys[0])
525 524 d.addCallback(keys_found)
526 525 d.addCallback(lambda _: self.multiengine.reset(targets=0, block=False))
527 526 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
528 527 d.addCallback(lambda _: self.multiengine.keys(targets=0, block=False))
529 528 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
530 529 def keys_not_found(keys):
531 530 self.assert_('a' not in keys[0])
532 531 self.assert_('b' not in keys[0])
533 532 self.assert_('b' not in keys[0])
534 533 d.addCallback(keys_not_found)
535 534
536 535 return d
537 536
538 537 def testPushPullSerialized(self):
539 538 self.addEngine(1)
540 539 dikt = dict(a=10,b='hi there',c=1.2345,d={'p':(1,2)})
541 540 sdikt = {}
542 541 for k,v in dikt.iteritems():
543 542 sdikt[k] = newserialized.serialize(v)
544 543 d= self.multiengine.push_serialized(dict(a=sdikt['a']), targets=0)
545 544 d.addCallback(lambda _: self.multiengine.pull('a',targets=0))
546 545 d.addCallback(lambda r: self.assertEquals(r[0], dikt['a']))
547 546 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
548 547 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
549 548 d.addCallback(lambda r: self.assertEquals(r, dikt['a']))
550 549 d.addCallback(lambda _: self.multiengine.push_serialized(sdikt, targets=0))
551 550 d.addCallback(lambda _: self.multiengine.pull_serialized(sdikt.keys(), targets=0))
552 551 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
553 552 d.addCallback(lambda r: self.assertEquals(r, dikt.values()))
554 553 d.addCallback(lambda _: self.multiengine.reset(targets=0))
555 554 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0))
556 555 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
557 556
558 557 #Non-blocking mode
559 558 d.addCallback(lambda r: self.multiengine.push_serialized(dict(a=sdikt['a']), targets=0, block=False))
560 559 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
561 560 d.addCallback(lambda _: self.multiengine.pull('a',targets=0))
562 561 d.addCallback(lambda r: self.assertEquals(r[0], dikt['a']))
563 562 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0, block=False))
564 563 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
565 564 d.addCallback(lambda serial: newserialized.IUnSerialized(serial[0]).getObject())
566 565 d.addCallback(lambda r: self.assertEquals(r, dikt['a']))
567 566 d.addCallback(lambda _: self.multiengine.push_serialized(sdikt, targets=0, block=False))
568 567 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
569 568 d.addCallback(lambda _: self.multiengine.pull_serialized(sdikt.keys(), targets=0, block=False))
570 569 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
571 570 d.addCallback(lambda serial: [newserialized.IUnSerialized(s).getObject() for s in serial[0]])
572 571 d.addCallback(lambda r: self.assertEquals(r, dikt.values()))
573 572 d.addCallback(lambda _: self.multiengine.reset(targets=0))
574 573 d.addCallback(lambda _: self.multiengine.pull_serialized('a', targets=0, block=False))
575 574 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
576 575 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
577 576 return d
578 577
579 578 def testClearQueue(self):
580 579 self.addEngine(4)
581 580 d= self.multiengine.clear_queue()
582 581 d.addCallback(lambda r: self.multiengine.clear_queue(block=False))
583 582 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
584 583 d.addCallback(lambda r: self.assertEquals(r,4*[None]))
585 584 return d
586 585
587 586 def testQueueStatus(self):
588 587 self.addEngine(4)
589 588 d= self.multiengine.queue_status(targets=0)
590 589 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
591 590 d.addCallback(lambda r: self.multiengine.queue_status(targets=0, block=False))
592 591 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
593 592 d.addCallback(lambda r: self.assert_(isinstance(r[0],tuple)))
594 593 return d
595 594
596 595 def testGetIDs(self):
597 596 self.addEngine(1)
598 597 d= self.multiengine.get_ids()
599 598 d.addCallback(lambda r: self.assertEquals(r, [0]))
600 599 d.addCallback(lambda _: self.addEngine(3))
601 600 d.addCallback(lambda _: self.multiengine.get_ids())
602 601 d.addCallback(lambda r: self.assertEquals(r, [0,1,2,3]))
603 602 return d
604 603
605 604 def testGetSetProperties(self):
606 605 self.addEngine(4)
607 606 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
608 607 d= self.multiengine.set_properties(dikt)
609 608 d.addCallback(lambda r: self.multiengine.get_properties())
610 609 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
611 610 d.addCallback(lambda r: self.multiengine.get_properties(('c',)))
612 611 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
613 612 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False)))
614 613 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd')))
615 614 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
616 615
617 616 #Non-blocking
618 617 d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
619 618 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
620 619 d.addCallback(lambda r: self.multiengine.get_properties(block=False))
621 620 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
622 621 d.addCallback(lambda r: self.assertEquals(r, 4*[dikt]))
623 622 d.addCallback(lambda r: self.multiengine.get_properties(('c',), block=False))
624 623 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
625 624 d.addCallback(lambda r: self.assertEquals(r, 4*[{'c': dikt['c']}]))
626 625 d.addCallback(lambda r: self.multiengine.set_properties(dict(c=False), block=False))
627 626 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
628 627 d.addCallback(lambda r: self.multiengine.get_properties(('c', 'd'), block=False))
629 628 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
630 629 d.addCallback(lambda r: self.assertEquals(r, 4*[dict(c=False, d=None)]))
631 630 return d
632 631
633 632 def testClearProperties(self):
634 633 self.addEngine(4)
635 634 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
636 635 d= self.multiengine.set_properties(dikt)
637 636 d.addCallback(lambda r: self.multiengine.clear_properties())
638 637 d.addCallback(lambda r: self.multiengine.get_properties())
639 638 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
640 639
641 640 #Non-blocking
642 641 d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
643 642 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
644 643 d.addCallback(lambda r: self.multiengine.clear_properties(block=False))
645 644 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
646 645 d.addCallback(lambda r: self.multiengine.get_properties(block=False))
647 646 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
648 647 d.addCallback(lambda r: self.assertEquals(r, 4*[{}]))
649 648 return d
650 649
651 650 def testDelHasProperties(self):
652 651 self.addEngine(4)
653 652 dikt = dict(a=5, b='asdf', c=True, d=None, e=range(5))
654 653 d= self.multiengine.set_properties(dikt)
655 654 d.addCallback(lambda r: self.multiengine.del_properties(('b','e')))
656 655 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e')))
657 656 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
658 657
659 658 #Non-blocking
660 659 d.addCallback(lambda r: self.multiengine.set_properties(dikt, block=False))
661 660 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
662 661 d.addCallback(lambda r: self.multiengine.del_properties(('b','e'), block=False))
663 662 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
664 663 d.addCallback(lambda r: self.multiengine.has_properties(('a','b','c','d','e'), block=False))
665 664 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
666 665 d.addCallback(lambda r: self.assertEquals(r, 4*[[True, False, True, True, False]]))
667 666 return d
668 667
669 668 def test_clear_pending_deferreds(self):
670 669 self.addEngine(4)
671 670 did_list = []
672 671 d= self.multiengine.execute('a=10',block=False)
673 672 d.addCallback(lambda did: did_list.append(did))
674 673 d.addCallback(lambda _: self.multiengine.push(dict(b=10),block=False))
675 674 d.addCallback(lambda did: did_list.append(did))
676 675 d.addCallback(lambda _: self.multiengine.pull(('a','b'),block=False))
677 676 d.addCallback(lambda did: did_list.append(did))
678 677 d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
679 678 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
680 679 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
681 680 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
682 681 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
683 682 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
684 683 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
685 684 return d
686 685
687 686 #-------------------------------------------------------------------------------
688 687 # Coordinator test cases
689 688 #-------------------------------------------------------------------------------
690 689
691 690 class IMultiEngineCoordinatorTestCase(object):
692 691
693 692 def testScatterGather(self):
694 693 self.addEngine(4)
695 694 d= self.multiengine.scatter('a', range(16))
696 695 d.addCallback(lambda r: self.multiengine.gather('a'))
697 696 d.addCallback(lambda r: self.assertEquals(r, range(16)))
698 697 d.addCallback(lambda _: self.multiengine.gather('asdf'))
699 698 d.addErrback(lambda f: self.assertRaises(NameError, _raise_it, f))
700 699 return d
701 700
702 701 def testScatterGatherNumpy(self):
703 702 try:
704 703 import numpy
705 704 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
706 705 except:
707 706 return
708 707 else:
709 708 self.addEngine(4)
710 709 a = numpy.arange(16)
711 710 d = self.multiengine.scatter('a', a)
712 711 d.addCallback(lambda r: self.multiengine.gather('a'))
713 712 d.addCallback(lambda r: assert_array_equal(r, a))
714 713 return d
715 714
716 715 def testMap(self):
717 716 self.addEngine(4)
718 717 def f(x):
719 718 return x**2
720 719 data = range(16)
721 720 d= self.multiengine.map(f, data)
722 721 d.addCallback(lambda r: self.assertEquals(r,[f(x) for x in data]))
723 722 return d
724 723
725 724
726 725 class ISynchronousMultiEngineCoordinatorTestCase(IMultiEngineCoordinatorTestCase):
727 726
728 727 def testScatterGatherNonblocking(self):
729 728 self.addEngine(4)
730 729 d= self.multiengine.scatter('a', range(16), block=False)
731 730 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
732 731 d.addCallback(lambda r: self.multiengine.gather('a', block=False))
733 732 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
734 733 d.addCallback(lambda r: self.assertEquals(r, range(16)))
735 734 return d
736 735
737 736 def testScatterGatherNumpyNonblocking(self):
738 737 try:
739 738 import numpy
740 739 from numpy.testing.utils import assert_array_equal, assert_array_almost_equal
741 740 except:
742 741 return
743 742 else:
744 743 self.addEngine(4)
745 744 a = numpy.arange(16)
746 745 d = self.multiengine.scatter('a', a, block=False)
747 746 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
748 747 d.addCallback(lambda r: self.multiengine.gather('a', block=False))
749 748 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
750 749 d.addCallback(lambda r: assert_array_equal(r, a))
751 750 return d
752 751
753 752 def test_clear_pending_deferreds(self):
754 753 self.addEngine(4)
755 754 did_list = []
756 755 d= self.multiengine.scatter('a',range(16),block=False)
757 756 d.addCallback(lambda did: did_list.append(did))
758 757 d.addCallback(lambda _: self.multiengine.gather('a',block=False))
759 758 d.addCallback(lambda did: did_list.append(did))
760 759 d.addCallback(lambda _: self.multiengine.map(lambda x: x, range(16),block=False))
761 760 d.addCallback(lambda did: did_list.append(did))
762 761 d.addCallback(lambda _: self.multiengine.clear_pending_deferreds())
763 762 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[0],True))
764 763 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
765 764 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[1],True))
766 765 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
767 766 d.addCallback(lambda _: self.multiengine.get_pending_deferred(did_list[2],True))
768 767 d.addErrback(lambda f: self.assertRaises(InvalidDeferredID, f.raiseException))
769 768 return d
770 769
771 770 #-------------------------------------------------------------------------------
772 771 # Extras test cases
773 772 #-------------------------------------------------------------------------------
774 773
775 774 class IMultiEngineExtrasTestCase(object):
776 775
777 776 def testZipPull(self):
778 777 self.addEngine(4)
779 778 d= self.multiengine.push(dict(a=10,b=20))
780 779 d.addCallback(lambda r: self.multiengine.zip_pull(('a','b')))
781 780 d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
782 781 return d
783 782
784 783 def testRun(self):
785 784 self.addEngine(4)
786 785 import tempfile
787 786 fname = tempfile.mktemp('foo.py')
788 787 f= open(fname, 'w')
789 788 f.write('a = 10\nb=30')
790 789 f.close()
791 790 d= self.multiengine.run(fname)
792 791 d.addCallback(lambda r: self.multiengine.pull(('a','b')))
793 792 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
794 793 return d
795 794
796 795
797 796 class ISynchronousMultiEngineExtrasTestCase(IMultiEngineExtrasTestCase):
798 797
799 798 def testZipPullNonblocking(self):
800 799 self.addEngine(4)
801 800 d= self.multiengine.push(dict(a=10,b=20))
802 801 d.addCallback(lambda r: self.multiengine.zip_pull(('a','b'), block=False))
803 802 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
804 803 d.addCallback(lambda r: self.assert_(r, [4*[10],4*[20]]))
805 804 return d
806 805
807 806 def testRunNonblocking(self):
808 807 self.addEngine(4)
809 808 import tempfile
810 809 fname = tempfile.mktemp('foo.py')
811 810 f= open(fname, 'w')
812 811 f.write('a = 10\nb=30')
813 812 f.close()
814 813 d= self.multiengine.run(fname, block=False)
815 814 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
816 815 d.addCallback(lambda r: self.multiengine.pull(('a','b')))
817 816 d.addCallback(lambda r: self.assertEquals(r, 4*[[10,30]]))
818 817 return d
819 818
820 819
821 820 #-------------------------------------------------------------------------------
822 821 # IFullSynchronousMultiEngineTestCase
823 822 #-------------------------------------------------------------------------------
824 823
825 824 class IFullSynchronousMultiEngineTestCase(ISynchronousMultiEngineTestCase,
826 825 ISynchronousMultiEngineCoordinatorTestCase,
827 826 ISynchronousMultiEngineExtrasTestCase):
828 827 pass
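
The synchronous multiengine tests above repeat a two-step idiom for non-blocking calls: pass block=False, receive a deferred id (a 40-character string, see isdid()), then redeem it with get_pending_deferred(did, True). A condensed sketch of that idiom as one more hypothetical test method (self.multiengine and addEngine come from the base classes above; the method itself is not part of this changeset):

    def testNonblockingIdiom(self):
        self.addEngine(2)
        # block=False returns a deferred id instead of the result...
        d = self.multiengine.execute('a=2**10', block=False)
        # ...which get_pending_deferred() exchanges for the real value.
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assert_(len(r)==2))
        d.addCallback(lambda _: self.multiengine.pull('a', block=False))
        d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
        d.addCallback(lambda r: self.assertEquals(r, 2*[1024]))
        return d
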
@@ -1,41 +1,43
1 from __future__ import with_statement
1 #from __future__ import with_statement
2
3 # XXX This file is currently disabled to preserve 2.4 compatibility.
2 4
3 5 #def test_simple():
4 6 if 0:
5 7
6 8 # XXX - for now, we need a running cluster to be started separately. The
7 9 # daemon work is almost finished, and will make much of this unnecessary.
8 10 from IPython.kernel import client
9 11 mec = client.MultiEngineClient(('127.0.0.1',10105))
10 12
11 13 try:
12 14 mec.get_ids()
13 15 except ConnectionRefusedError:
14 16 import os, time
15 17 os.system('ipcluster -n 2 &')
16 18 time.sleep(2)
17 19 mec = client.MultiEngineClient(('127.0.0.1',10105))
18 20
19 21 mec.block = False
20 22
21 23 import itertools
22 24 c = itertools.count()
23 25
24 26 parallel = RemoteMultiEngine(mec)
25 27
26 28 mec.pushAll()
27 29
28 with parallel as pr:
29 # A comment
30 remote() # this means the code below only runs remotely
31 print 'Hello remote world'
32 x = range(10)
33 # Comments are OK
34 # Even misindented.
35 y = x+1
30 ## with parallel as pr:
31 ## # A comment
32 ## remote() # this means the code below only runs remotely
33 ## print 'Hello remote world'
34 ## x = range(10)
35 ## # Comments are OK
36 ## # Even misindented.
37 ## y = x+1
36 38
37 39
38 with pfor('i',sequence) as pr:
39 print x[i]
40 ## with pfor('i',sequence) as pr:
41 ## print x[i]
40 42
41 43 print pr.x + pr.y
@@ -1,43 +1,44
1 1 # encoding: utf-8
2 2
3 3 """This file contains unittests for the kernel.engineservice.py module.
4 4
5 5 Things that should be tested:
6 6
7 7 - Should the EngineService return Deferred objects?
8 8 - Run the same tests that are run in shell.py.
9 9 - Make sure that the Interface is really implemented.
10 10 - The startService and stopService methods.
11 11 """
12 12
13 13 __docformat__ = "restructuredtext en"
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Copyright (C) 2008 The IPython Development Team
17 17 #
18 18 # Distributed under the terms of the BSD License. The full license is in
19 19 # the file COPYING, distributed as part of this software.
20 20 #-------------------------------------------------------------------------------
21 21
22 22 #-------------------------------------------------------------------------------
23 23 # Imports
24 24 #-------------------------------------------------------------------------------
25 25
26 26 try:
27 27 from twisted.application.service import IService
28 28 from IPython.kernel.controllerservice import ControllerService
29 29 from IPython.kernel.tests import multienginetest as met
30 30 from controllertest import IControllerCoreTestCase
31 31 from IPython.testing.util import DeferredTestCase
32 32 except ImportError:
33 pass
34 else:
35 class BasicControllerServiceTest(DeferredTestCase,
36 IControllerCoreTestCase):
37
38 def setUp(self):
39 self.controller = ControllerService()
40 self.controller.startService()
41
42 def tearDown(self):
43 self.controller.stopService()
33 import nose
34 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
35
36 class BasicControllerServiceTest(DeferredTestCase,
37 IControllerCoreTestCase):
38
39 def setUp(self):
40 self.controller = ControllerService()
41 self.controller.startService()
42
43 def tearDown(self):
44 self.controller.stopService()
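The recurring change in this changeset replaces the old pattern of guarding optional imports with a silent "except ImportError: pass" and an indented else: block by an explicit module-level skip: when the optional dependencies are missing, nose.SkipTest is raised at import time and the whole module is reported as skipped instead of silently shrinking to nothing. A minimal, self-contained sketch of that guard (the dependency and test names below are placeholders, not IPython code):

    # Minimal sketch of the import-guard-and-skip pattern applied throughout
    # this changeset.  The dependency and the test below are placeholders.
    try:
        from twisted.internet import defer
        from twisted.trial import unittest
    except ImportError:
        import nose
        raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")


    class ExampleDeferredTest(unittest.TestCase):
        """Runs only when the optional dependencies import cleanly."""

        def test_succeed(self):
            d = defer.succeed(42)
            d.addCallback(lambda r: self.assertEqual(r, 42))
            return d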
@@ -1,92 +1,93
1 1 # encoding: utf-8
2 2
3 3 """This file contains unittests for the enginepb.py module."""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 try:
19 19 from twisted.python import components
20 20 from twisted.internet import reactor, defer
21 21 from twisted.spread import pb
22 22 from twisted.internet.base import DelayedCall
23 23 DelayedCall.debug = True
24 24
25 25 import zope.interface as zi
26 26
27 27 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
28 28 from IPython.kernel import engineservice as es
29 29 from IPython.testing.util import DeferredTestCase
30 30 from IPython.kernel.controllerservice import IControllerBase
31 31 from IPython.kernel.enginefc import FCRemoteEngineRefFromService, IEngineBase
32 32 from IPython.kernel.engineservice import IEngineQueued
33 33 from IPython.kernel.engineconnector import EngineConnector
34 34
35 35 from IPython.kernel.tests.engineservicetest import \
36 36 IEngineCoreTestCase, \
37 37 IEngineSerializedTestCase, \
38 38 IEngineQueuedTestCase
39 39 except ImportError:
40 print "we got an error!!!"
41 raise
42 else:
43 class EngineFCTest(DeferredTestCase,
44 IEngineCoreTestCase,
45 IEngineSerializedTestCase,
46 IEngineQueuedTestCase
47 ):
48
49 zi.implements(IControllerBase)
50
51 def setUp(self):
52
53 # Start a server and append to self.servers
54 self.controller_reference = FCRemoteEngineRefFromService(self)
55 self.controller_tub = Tub()
56 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
57 self.controller_tub.setLocation('127.0.0.1:10105')
58
59 furl = self.controller_tub.registerReference(self.controller_reference)
60 self.controller_tub.startService()
61
62 # Start an EngineService and append to services/client
63 self.engine_service = es.EngineService()
64 self.engine_service.startService()
65 self.engine_tub = Tub()
66 self.engine_tub.startService()
67 engine_connector = EngineConnector(self.engine_tub)
68 d = engine_connector.connect_to_controller(self.engine_service, furl)
69 # This deferred doesn't fire until after register_engine has returned and
70        # thus, self.engine has been defined and the tests can proceed.
71 return d
72
73 def tearDown(self):
74 dlist = []
75 # Shut down the engine
76 d = self.engine_tub.stopService()
77 dlist.append(d)
78 # Shut down the controller
79 d = self.controller_tub.stopService()
80 dlist.append(d)
81 return defer.DeferredList(dlist)
82
83 #---------------------------------------------------------------------------
84 # Make me look like a basic controller
85 #---------------------------------------------------------------------------
86
87 def register_engine(self, engine_ref, id=None, ip=None, port=None, pid=None):
88 self.engine = IEngineQueued(IEngineBase(engine_ref))
89 return {'id':id}
90
91 def unregister_engine(self, id):
92 pass No newline at end of file
40 import nose
41 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
42
43
44 class EngineFCTest(DeferredTestCase,
45 IEngineCoreTestCase,
46 IEngineSerializedTestCase,
47 IEngineQueuedTestCase
48 ):
49
50 zi.implements(IControllerBase)
51
52 def setUp(self):
53
54 # Start a server and append to self.servers
55 self.controller_reference = FCRemoteEngineRefFromService(self)
56 self.controller_tub = Tub()
57 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
58 self.controller_tub.setLocation('127.0.0.1:10105')
59
60 furl = self.controller_tub.registerReference(self.controller_reference)
61 self.controller_tub.startService()
62
63 # Start an EngineService and append to services/client
64 self.engine_service = es.EngineService()
65 self.engine_service.startService()
66 self.engine_tub = Tub()
67 self.engine_tub.startService()
68 engine_connector = EngineConnector(self.engine_tub)
69 d = engine_connector.connect_to_controller(self.engine_service, furl)
70 # This deferred doesn't fire until after register_engine has returned and
71        # thus, self.engine has been defined and the tests can proceed.
72 return d
73
74 def tearDown(self):
75 dlist = []
76 # Shut down the engine
77 d = self.engine_tub.stopService()
78 dlist.append(d)
79 # Shut down the controller
80 d = self.controller_tub.stopService()
81 dlist.append(d)
82 return defer.DeferredList(dlist)
83
84 #---------------------------------------------------------------------------
85 # Make me look like a basic controller
86 #---------------------------------------------------------------------------
87
88 def register_engine(self, engine_ref, id=None, ip=None, port=None, pid=None):
89 self.engine = IEngineQueued(IEngineBase(engine_ref))
90 return {'id':id}
91
92 def unregister_engine(self, id):
93 pass No newline at end of file
@@ -1,78 +1,80
1 1 # encoding: utf-8
2 2
3 3 """This file contains unittests for the kernel.engineservice.py module.
4 4
5 5 Things that should be tested:
6 6
7 7 - Should the EngineService return Deferred objects?
8 8 - Run the same tests that are run in shell.py.
9 9 - Make sure that the Interface is really implemented.
10 10 - The startService and stopService methods.
11 11 """
12 12
13 13 __docformat__ = "restructuredtext en"
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Copyright (C) 2008 The IPython Development Team
17 17 #
18 18 # Distributed under the terms of the BSD License. The full license is in
19 19 # the file COPYING, distributed as part of this software.
20 20 #-------------------------------------------------------------------------------
21 21
22 22 #-------------------------------------------------------------------------------
23 23 # Imports
24 24 #-------------------------------------------------------------------------------
25 25
26 26 try:
27 27 from twisted.internet import defer
28 28 from twisted.application.service import IService
29 29
30 30 from IPython.kernel import engineservice as es
31 31 from IPython.testing.util import DeferredTestCase
32 32 from IPython.kernel.tests.engineservicetest import \
33 33 IEngineCoreTestCase, \
34 34 IEngineSerializedTestCase, \
35 35 IEngineQueuedTestCase, \
36 36 IEnginePropertiesTestCase
37 37 except ImportError:
38 pass
39 else:
40 class BasicEngineServiceTest(DeferredTestCase,
41 IEngineCoreTestCase,
42 IEngineSerializedTestCase,
43 IEnginePropertiesTestCase):
44
45 def setUp(self):
46 self.engine = es.EngineService()
47 self.engine.startService()
48
49 def tearDown(self):
50 return self.engine.stopService()
51
52 class ThreadedEngineServiceTest(DeferredTestCase,
53 IEngineCoreTestCase,
54 IEngineSerializedTestCase,
55 IEnginePropertiesTestCase):
56
57 def setUp(self):
58 self.engine = es.ThreadedEngineService()
59 self.engine.startService()
38 import nose
39 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
40
41
42 class BasicEngineServiceTest(DeferredTestCase,
43 IEngineCoreTestCase,
44 IEngineSerializedTestCase,
45 IEnginePropertiesTestCase):
46
47 def setUp(self):
48 self.engine = es.EngineService()
49 self.engine.startService()
50
51 def tearDown(self):
52 return self.engine.stopService()
53
54 class ThreadedEngineServiceTest(DeferredTestCase,
55 IEngineCoreTestCase,
56 IEngineSerializedTestCase,
57 IEnginePropertiesTestCase):
58
59 def setUp(self):
60 self.engine = es.ThreadedEngineService()
61 self.engine.startService()
62
63 def tearDown(self):
64 return self.engine.stopService()
65
66 class QueuedEngineServiceTest(DeferredTestCase,
67 IEngineCoreTestCase,
68 IEngineSerializedTestCase,
69 IEnginePropertiesTestCase,
70 IEngineQueuedTestCase):
71
72 def setUp(self):
73 self.rawEngine = es.EngineService()
74 self.rawEngine.startService()
75 self.engine = es.IEngineQueued(self.rawEngine)
60 76
61 def tearDown(self):
62 return self.engine.stopService()
63
64 class QueuedEngineServiceTest(DeferredTestCase,
65 IEngineCoreTestCase,
66 IEngineSerializedTestCase,
67 IEnginePropertiesTestCase,
68 IEngineQueuedTestCase):
69
70 def setUp(self):
71 self.rawEngine = es.EngineService()
72 self.rawEngine.startService()
73 self.engine = es.IEngineQueued(self.rawEngine)
74
75 def tearDown(self):
76 return self.rawEngine.stopService()
77
78
77 def tearDown(self):
78 return self.rawEngine.stopService()
79
80
@@ -1,54 +1,56
1 1 # encoding: utf-8
2 2
3 3 """"""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 try:
19 19 from twisted.internet import defer
20 20 from IPython.testing.util import DeferredTestCase
21 21 from IPython.kernel.controllerservice import ControllerService
22 22 from IPython.kernel import multiengine as me
23 23 from IPython.kernel.tests.multienginetest import (IMultiEngineTestCase,
24 24 ISynchronousMultiEngineTestCase)
25 25 except ImportError:
26 pass
27 else:
28 class BasicMultiEngineTestCase(DeferredTestCase, IMultiEngineTestCase):
26 import nose
27 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
28
29
30 class BasicMultiEngineTestCase(DeferredTestCase, IMultiEngineTestCase):
31
32 def setUp(self):
33 self.controller = ControllerService()
34 self.controller.startService()
35 self.multiengine = me.IMultiEngine(self.controller)
36 self.engines = []
29 37
30 def setUp(self):
31 self.controller = ControllerService()
32 self.controller.startService()
33 self.multiengine = me.IMultiEngine(self.controller)
34 self.engines = []
35
36 def tearDown(self):
37 self.controller.stopService()
38 for e in self.engines:
39 e.stopService()
40
41
42 class SynchronousMultiEngineTestCase(DeferredTestCase, ISynchronousMultiEngineTestCase):
38 def tearDown(self):
39 self.controller.stopService()
40 for e in self.engines:
41 e.stopService()
42
43
44 class SynchronousMultiEngineTestCase(DeferredTestCase, ISynchronousMultiEngineTestCase):
45
46 def setUp(self):
47 self.controller = ControllerService()
48 self.controller.startService()
49 self.multiengine = me.ISynchronousMultiEngine(me.IMultiEngine(self.controller))
50 self.engines = []
43 51
44 def setUp(self):
45 self.controller = ControllerService()
46 self.controller.startService()
47 self.multiengine = me.ISynchronousMultiEngine(me.IMultiEngine(self.controller))
48 self.engines = []
49
50 def tearDown(self):
51 self.controller.stopService()
52 for e in self.engines:
53 e.stopService()
52 def tearDown(self):
53 self.controller.stopService()
54 for e in self.engines:
55 e.stopService()
54 56
@@ -1,144 +1,144
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 __docformat__ = "restructuredtext en"
5 5
6 6 #-------------------------------------------------------------------------------
7 7 # Copyright (C) 2008 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-------------------------------------------------------------------------------
12 12
13 13 #-------------------------------------------------------------------------------
14 14 # Imports
15 15 #-------------------------------------------------------------------------------
16 16
17 17 try:
18 18 from twisted.internet import defer, reactor
19 19
20 20 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
21 21
22 22 from IPython.testing.util import DeferredTestCase
23 23 from IPython.kernel.controllerservice import ControllerService
24 24 from IPython.kernel.multiengine import IMultiEngine
25 25 from IPython.kernel.tests.multienginetest import IFullSynchronousMultiEngineTestCase
26 26 from IPython.kernel.multienginefc import IFCSynchronousMultiEngine
27 27 from IPython.kernel import multiengine as me
28 28 from IPython.kernel.clientconnector import ClientConnector
29 29 from IPython.kernel.parallelfunction import ParallelFunction
30 30 from IPython.kernel.error import CompositeError
31 31 from IPython.kernel.util import printer
32 32 except ImportError:
33 pass
34 else:
33 import nose
34 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
35 35
36 def _raise_it(f):
37 try:
38 f.raiseException()
39 except CompositeError, e:
40 e.raise_exception()
36 def _raise_it(f):
37 try:
38 f.raiseException()
39 except CompositeError, e:
40 e.raise_exception()
41
42
43 class FullSynchronousMultiEngineTestCase(DeferredTestCase, IFullSynchronousMultiEngineTestCase):
44
45 def setUp(self):
41 46
47 self.engines = []
48
49 self.controller = ControllerService()
50 self.controller.startService()
51 self.imultiengine = IMultiEngine(self.controller)
52 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
53
54 self.controller_tub = Tub()
55 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
56 self.controller_tub.setLocation('127.0.0.1:10105')
42 57
43 class FullSynchronousMultiEngineTestCase(DeferredTestCase, IFullSynchronousMultiEngineTestCase):
58 furl = self.controller_tub.registerReference(self.mec_referenceable)
59 self.controller_tub.startService()
44 60
45 def setUp(self):
46
47 self.engines = []
48
49 self.controller = ControllerService()
50 self.controller.startService()
51 self.imultiengine = IMultiEngine(self.controller)
52 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
53
54 self.controller_tub = Tub()
55 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
56 self.controller_tub.setLocation('127.0.0.1:10105')
57
58 furl = self.controller_tub.registerReference(self.mec_referenceable)
59 self.controller_tub.startService()
60
61 self.client_tub = ClientConnector()
62 d = self.client_tub.get_multiengine_client(furl)
63 d.addCallback(self.handle_got_client)
64 return d
65
66 def handle_got_client(self, client):
67 self.multiengine = client
61 self.client_tub = ClientConnector()
62 d = self.client_tub.get_multiengine_client(furl)
63 d.addCallback(self.handle_got_client)
64 return d
68 65
69 def tearDown(self):
70 dlist = []
71 # Shut down the multiengine client
72 d = self.client_tub.tub.stopService()
73 dlist.append(d)
74 # Shut down the engines
75 for e in self.engines:
76 e.stopService()
77 # Shut down the controller
78 d = self.controller_tub.stopService()
79 d.addBoth(lambda _: self.controller.stopService())
80 dlist.append(d)
81 return defer.DeferredList(dlist)
66 def handle_got_client(self, client):
67 self.multiengine = client
68
69 def tearDown(self):
70 dlist = []
71 # Shut down the multiengine client
72 d = self.client_tub.tub.stopService()
73 dlist.append(d)
74 # Shut down the engines
75 for e in self.engines:
76 e.stopService()
77 # Shut down the controller
78 d = self.controller_tub.stopService()
79 d.addBoth(lambda _: self.controller.stopService())
80 dlist.append(d)
81 return defer.DeferredList(dlist)
82 82
83 def test_mapper(self):
84 self.addEngine(4)
85 m = self.multiengine.mapper()
86 self.assertEquals(m.multiengine,self.multiengine)
87 self.assertEquals(m.dist,'b')
88 self.assertEquals(m.targets,'all')
89 self.assertEquals(m.block,True)
90
91 def test_map_default(self):
92 self.addEngine(4)
93 m = self.multiengine.mapper()
94 d = m.map(lambda x: 2*x, range(10))
95 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
96 d.addCallback(lambda _: self.multiengine.map(lambda x: 2*x, range(10)))
97 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
98 return d
99
100 def test_map_noblock(self):
101 self.addEngine(4)
102 m = self.multiengine.mapper(block=False)
103 d = m.map(lambda x: 2*x, range(10))
104 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
105 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
106 return d
107
108 def test_mapper_fail(self):
109 self.addEngine(4)
110 m = self.multiengine.mapper()
111 d = m.map(lambda x: 1/0, range(10))
112 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
113 return d
114
115 def test_parallel(self):
116 self.addEngine(4)
117 p = self.multiengine.parallel()
118 self.assert_(isinstance(p, ParallelFunction))
119 @p
120 def f(x): return 2*x
121 d = f(range(10))
122 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
123 return d
124
125 def test_parallel_noblock(self):
126 self.addEngine(1)
127 p = self.multiengine.parallel(block=False)
128 self.assert_(isinstance(p, ParallelFunction))
129 @p
130 def f(x): return 2*x
131 d = f(range(10))
132 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
133 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
134 return d
135
136 def test_parallel_fail(self):
137 self.addEngine(4)
138 p = self.multiengine.parallel()
139 self.assert_(isinstance(p, ParallelFunction))
140 @p
141 def f(x): return 1/0
142 d = f(range(10))
143 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
144 return d No newline at end of file
83 def test_mapper(self):
84 self.addEngine(4)
85 m = self.multiengine.mapper()
86 self.assertEquals(m.multiengine,self.multiengine)
87 self.assertEquals(m.dist,'b')
88 self.assertEquals(m.targets,'all')
89 self.assertEquals(m.block,True)
90
91 def test_map_default(self):
92 self.addEngine(4)
93 m = self.multiengine.mapper()
94 d = m.map(lambda x: 2*x, range(10))
95 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
96 d.addCallback(lambda _: self.multiengine.map(lambda x: 2*x, range(10)))
97 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
98 return d
99
100 def test_map_noblock(self):
101 self.addEngine(4)
102 m = self.multiengine.mapper(block=False)
103 d = m.map(lambda x: 2*x, range(10))
104 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
105 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
106 return d
107
108 def test_mapper_fail(self):
109 self.addEngine(4)
110 m = self.multiengine.mapper()
111 d = m.map(lambda x: 1/0, range(10))
112 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
113 return d
114
115 def test_parallel(self):
116 self.addEngine(4)
117 p = self.multiengine.parallel()
118 self.assert_(isinstance(p, ParallelFunction))
119 @p
120 def f(x): return 2*x
121 d = f(range(10))
122 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
123 return d
124
125 def test_parallel_noblock(self):
126 self.addEngine(1)
127 p = self.multiengine.parallel(block=False)
128 self.assert_(isinstance(p, ParallelFunction))
129 @p
130 def f(x): return 2*x
131 d = f(range(10))
132 d.addCallback(lambda did: self.multiengine.get_pending_deferred(did, True))
133 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
134 return d
135
136 def test_parallel_fail(self):
137 self.addEngine(4)
138 p = self.multiengine.parallel()
139 self.assert_(isinstance(p, ParallelFunction))
140 @p
141 def f(x): return 1/0
142 d = f(range(10))
143 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
144 return d No newline at end of file
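For readers skimming the diff, the mapper() and parallel() tests in this file reduce to a small usage pattern. The sketch below condenses it into one illustrative test method; it uses only calls that appear in the tests above and assumes the same setUp()/addEngine() harness:

    # Condensed sketch of the mapper()/parallel() usage exercised above; this
    # method is illustrative and not part of the real test suite.
    def test_mapper_and_parallel_sketch(self):
        self.addEngine(4)

        # mapper() gives an explicit mapper object whose map() runs the
        # function across the engines and fires with the collected results.
        m = self.multiengine.mapper()
        d = m.map(lambda x: 2 * x, range(10))
        d.addCallback(lambda r: self.assertEquals(r, [2 * x for x in range(10)]))

        # parallel() returns a ParallelFunction decorator: calling the
        # decorated function maps it over its argument sequence.
        p = self.multiengine.parallel()

        @p
        def double(x):
            return 2 * x

        d.addCallback(lambda _: double(range(10)))
        d.addCallback(lambda r: self.assertEquals(r, [2 * x for x in range(10)]))
        return d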
@@ -1,102 +1,102
1 1 # encoding: utf-8
2 2
3 3 """This file contains unittests for the shell.py module."""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 try:
19 19 import zope.interface as zi
20 20 from twisted.trial import unittest
21 21 from IPython.testing.util import DeferredTestCase
22 22
23 23 from IPython.kernel.newserialized import \
24 24 ISerialized, \
25 25 IUnSerialized, \
26 26 Serialized, \
27 27 UnSerialized, \
28 28 SerializeIt, \
29 29 UnSerializeIt
30 30 except ImportError:
31 pass
32 else:
33 #-------------------------------------------------------------------------------
34 # Tests
35 #-------------------------------------------------------------------------------
31 import nose
32 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
33
34 #-------------------------------------------------------------------------------
35 # Tests
36 #-------------------------------------------------------------------------------
37
38 class SerializedTestCase(unittest.TestCase):
39
40 def setUp(self):
41 pass
36 42
37 class SerializedTestCase(unittest.TestCase):
43 def tearDown(self):
44 pass
38 45
39 def setUp(self):
40 pass
41
42 def tearDown(self):
43 pass
44
45 def testSerializedInterfaces(self):
46 def testSerializedInterfaces(self):
46 47
47 us = UnSerialized({'a':10, 'b':range(10)})
48 s = ISerialized(us)
49 uss = IUnSerialized(s)
50 self.assert_(ISerialized.providedBy(s))
51 self.assert_(IUnSerialized.providedBy(us))
52 self.assert_(IUnSerialized.providedBy(uss))
53 for m in list(ISerialized):
54 self.assert_(hasattr(s, m))
55 for m in list(IUnSerialized):
56 self.assert_(hasattr(us, m))
57 for m in list(IUnSerialized):
58 self.assert_(hasattr(uss, m))
48 us = UnSerialized({'a':10, 'b':range(10)})
49 s = ISerialized(us)
50 uss = IUnSerialized(s)
51 self.assert_(ISerialized.providedBy(s))
52 self.assert_(IUnSerialized.providedBy(us))
53 self.assert_(IUnSerialized.providedBy(uss))
54 for m in list(ISerialized):
55 self.assert_(hasattr(s, m))
56 for m in list(IUnSerialized):
57 self.assert_(hasattr(us, m))
58 for m in list(IUnSerialized):
59 self.assert_(hasattr(uss, m))
59 60
60 def testPickleSerialized(self):
61 obj = {'a':1.45345, 'b':'asdfsdf', 'c':10000L}
62 original = UnSerialized(obj)
63 originalSer = ISerialized(original)
64 firstData = originalSer.getData()
65 firstTD = originalSer.getTypeDescriptor()
66 firstMD = originalSer.getMetadata()
67 self.assert_(firstTD == 'pickle')
68 self.assert_(firstMD == {})
69 unSerialized = IUnSerialized(originalSer)
70 secondObj = unSerialized.getObject()
71 for k, v in secondObj.iteritems():
72 self.assert_(obj[k] == v)
73 secondSer = ISerialized(UnSerialized(secondObj))
74 self.assert_(firstData == secondSer.getData())
75 self.assert_(firstTD == secondSer.getTypeDescriptor() )
76 self.assert_(firstMD == secondSer.getMetadata())
61 def testPickleSerialized(self):
62 obj = {'a':1.45345, 'b':'asdfsdf', 'c':10000L}
63 original = UnSerialized(obj)
64 originalSer = ISerialized(original)
65 firstData = originalSer.getData()
66 firstTD = originalSer.getTypeDescriptor()
67 firstMD = originalSer.getMetadata()
68 self.assert_(firstTD == 'pickle')
69 self.assert_(firstMD == {})
70 unSerialized = IUnSerialized(originalSer)
71 secondObj = unSerialized.getObject()
72 for k, v in secondObj.iteritems():
73 self.assert_(obj[k] == v)
74 secondSer = ISerialized(UnSerialized(secondObj))
75 self.assert_(firstData == secondSer.getData())
76 self.assert_(firstTD == secondSer.getTypeDescriptor() )
77 self.assert_(firstMD == secondSer.getMetadata())
78
79 def testNDArraySerialized(self):
80 try:
81 import numpy
82 except ImportError:
83 pass
84 else:
85 a = numpy.linspace(0.0, 1.0, 1000)
86 unSer1 = UnSerialized(a)
87 ser1 = ISerialized(unSer1)
88 td = ser1.getTypeDescriptor()
89 self.assert_(td == 'ndarray')
90 md = ser1.getMetadata()
91 self.assert_(md['shape'] == a.shape)
92 self.assert_(md['dtype'] == a.dtype.str)
93 buff = ser1.getData()
94 self.assert_(buff == numpy.getbuffer(a))
95 s = Serialized(buff, td, md)
96 us = IUnSerialized(s)
97 final = us.getObject()
98 self.assert_(numpy.getbuffer(a) == numpy.getbuffer(final))
99 self.assert_(a.dtype.str == final.dtype.str)
100 self.assert_(a.shape == final.shape)
101
77 102
78 def testNDArraySerialized(self):
79 try:
80 import numpy
81 except ImportError:
82 pass
83 else:
84 a = numpy.linspace(0.0, 1.0, 1000)
85 unSer1 = UnSerialized(a)
86 ser1 = ISerialized(unSer1)
87 td = ser1.getTypeDescriptor()
88 self.assert_(td == 'ndarray')
89 md = ser1.getMetadata()
90 self.assert_(md['shape'] == a.shape)
91 self.assert_(md['dtype'] == a.dtype.str)
92 buff = ser1.getData()
93 self.assert_(buff == numpy.getbuffer(a))
94 s = Serialized(buff, td, md)
95 us = IUnSerialized(s)
96 final = us.getObject()
97 self.assert_(numpy.getbuffer(a) == numpy.getbuffer(final))
98 self.assert_(a.dtype.str == final.dtype.str)
99 self.assert_(a.shape == final.shape)
100
101
102 No newline at end of file
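Beyond being tests, testPickleSerialized and testNDArraySerialized above double as documentation for the adapter-based serialization API: wrap an object in UnSerialized, adapt it to ISerialized to obtain raw data plus a type descriptor and metadata, and adapt a Serialized container back to IUnSerialized to recover the object. A minimal round-trip sketch, assuming the same guarded imports as the test module:

    # Round-trip sketch of the newserialized adapter API exercised above.
    # Assumes zope.interface/Twisted are available so the imports succeed.
    from IPython.kernel.newserialized import (ISerialized, IUnSerialized,
                                              Serialized, UnSerialized)

    obj = {'a': 10, 'b': range(10)}

    ser = ISerialized(UnSerialized(obj))   # adapt to the serialized form
    data = ser.getData()                   # raw bytes (pickled, for a dict)
    td = ser.getTypeDescriptor()           # 'pickle' for generic objects
    md = ser.getMetadata()                 # {} unless e.g. an ndarray

    # Rebuild a Serialized container and adapt back to recover the object.
    restored = IUnSerialized(Serialized(data, td, md)).getObject()
    assert restored == obj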
@@ -1,186 +1,186
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 """Tests for pendingdeferred.py"""
5 5
6 6 __docformat__ = "restructuredtext en"
7 7
8 8 #-------------------------------------------------------------------------------
9 9 # Copyright (C) 2008 The IPython Development Team
10 10 #
11 11 # Distributed under the terms of the BSD License. The full license is in
12 12 # the file COPYING, distributed as part of this software.
13 13 #-------------------------------------------------------------------------------
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Imports
17 17 #-------------------------------------------------------------------------------
18 18
19 19 try:
20 20 from twisted.internet import defer
21 21 from twisted.python import failure
22 22
23 23 from IPython.testing.util import DeferredTestCase
24 24 import IPython.kernel.pendingdeferred as pd
25 25 from IPython.kernel import error
26 26 from IPython.kernel.util import printer
27 27 except ImportError:
28 pass
29 else:
28 import nose
29 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
30 30
31 class Foo(object):
32
33 def bar(self, bahz):
34 return defer.succeed('blahblah: %s' % bahz)
31 class Foo(object):
35 32
36 class TwoPhaseFoo(pd.PendingDeferredManager):
37
38 def __init__(self, foo):
39 self.foo = foo
40 pd.PendingDeferredManager.__init__(self)
33 def bar(self, bahz):
34 return defer.succeed('blahblah: %s' % bahz)
41 35
42 @pd.two_phase
43 def bar(self, bahz):
44 return self.foo.bar(bahz)
36 class TwoPhaseFoo(pd.PendingDeferredManager):
45 37
46 class PendingDeferredManagerTest(DeferredTestCase):
47
48 def setUp(self):
49 self.pdm = pd.PendingDeferredManager()
50
51 def tearDown(self):
52 pass
53
54 def testBasic(self):
55 dDict = {}
56 # Create 10 deferreds and save them
57 for i in range(10):
58 d = defer.Deferred()
59 did = self.pdm.save_pending_deferred(d)
60 dDict[did] = d
61        # Make sure they are being saved
62 for k in dDict.keys():
63 self.assert_(self.pdm.quick_has_id(k))
64 # Get the pending deferred (block=True), then callback with 'foo' and compare
65 for did in dDict.keys()[0:5]:
66 d = self.pdm.get_pending_deferred(did,block=True)
67 dDict[did].callback('foo')
68 d.addCallback(lambda r: self.assert_(r=='foo'))
69 # Get the pending deferreds with (block=False) and make sure ResultNotCompleted is raised
70 for did in dDict.keys()[5:10]:
71 d = self.pdm.get_pending_deferred(did,block=False)
72 d.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
73 # Now callback the last 5, get them and compare.
74 for did in dDict.keys()[5:10]:
75 dDict[did].callback('foo')
76 d = self.pdm.get_pending_deferred(did,block=False)
77 d.addCallback(lambda r: self.assert_(r=='foo'))
78
79 def test_save_then_delete(self):
80 d = defer.Deferred()
81 did = self.pdm.save_pending_deferred(d)
82 self.assert_(self.pdm.quick_has_id(did))
83 self.pdm.delete_pending_deferred(did)
84 self.assert_(not self.pdm.quick_has_id(did))
85
86 def test_save_get_delete(self):
87 d = defer.Deferred()
88 did = self.pdm.save_pending_deferred(d)
89 d2 = self.pdm.get_pending_deferred(did,True)
90 d2.addErrback(lambda f: self.assertRaises(error.AbortedPendingDeferredError, f.raiseException))
91 self.pdm.delete_pending_deferred(did)
92 return d2
93
94 def test_double_get(self):
95 d = defer.Deferred()
96 did = self.pdm.save_pending_deferred(d)
97 d2 = self.pdm.get_pending_deferred(did,True)
98 d3 = self.pdm.get_pending_deferred(did,True)
99 d3.addErrback(lambda f: self.assertRaises(error.InvalidDeferredID, f.raiseException))
100
101 def test_get_after_callback(self):
102 d = defer.Deferred()
103 did = self.pdm.save_pending_deferred(d)
104 d.callback('foo')
105 d2 = self.pdm.get_pending_deferred(did,True)
106 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
107 self.assert_(not self.pdm.quick_has_id(did))
38 def __init__(self, foo):
39 self.foo = foo
40 pd.PendingDeferredManager.__init__(self)
108 41
109 def test_get_before_callback(self):
110 d = defer.Deferred()
111 did = self.pdm.save_pending_deferred(d)
112 d2 = self.pdm.get_pending_deferred(did,True)
113 d.callback('foo')
114 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
115 self.assert_(not self.pdm.quick_has_id(did))
116 d = defer.Deferred()
117 did = self.pdm.save_pending_deferred(d)
118 d2 = self.pdm.get_pending_deferred(did,True)
119 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
120 d.callback('foo')
121 self.assert_(not self.pdm.quick_has_id(did))
122
123 def test_get_after_errback(self):
124 class MyError(Exception):
125 pass
126 d = defer.Deferred()
127 did = self.pdm.save_pending_deferred(d)
128 d.errback(failure.Failure(MyError('foo')))
129 d2 = self.pdm.get_pending_deferred(did,True)
130 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
131 self.assert_(not self.pdm.quick_has_id(did))
132
133 def test_get_before_errback(self):
134 class MyError(Exception):
135 pass
136 d = defer.Deferred()
137 did = self.pdm.save_pending_deferred(d)
138 d2 = self.pdm.get_pending_deferred(did,True)
139 d.errback(failure.Failure(MyError('foo')))
140 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
141 self.assert_(not self.pdm.quick_has_id(did))
142 d = defer.Deferred()
143 did = self.pdm.save_pending_deferred(d)
144 d2 = self.pdm.get_pending_deferred(did,True)
145 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
146 d.errback(failure.Failure(MyError('foo')))
147 self.assert_(not self.pdm.quick_has_id(did))
148
149 def test_noresult_noblock(self):
150 d = defer.Deferred()
151 did = self.pdm.save_pending_deferred(d)
152 d2 = self.pdm.get_pending_deferred(did,False)
153 d2.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
42 @pd.two_phase
43 def bar(self, bahz):
44 return self.foo.bar(bahz)
154 45
155 def test_with_callbacks(self):
156 d = defer.Deferred()
157 d.addCallback(lambda r: r+' foo')
158 d.addCallback(lambda r: r+' bar')
159 did = self.pdm.save_pending_deferred(d)
160 d2 = self.pdm.get_pending_deferred(did,True)
161 d.callback('bam')
162 d2.addCallback(lambda r: self.assertEquals(r,'bam foo bar'))
46 class PendingDeferredManagerTest(DeferredTestCase):
47
48 def setUp(self):
49 self.pdm = pd.PendingDeferredManager()
163 50
164 def test_with_errbacks(self):
165 class MyError(Exception):
166 pass
51 def tearDown(self):
52 pass
53
54 def testBasic(self):
55 dDict = {}
56 # Create 10 deferreds and save them
57 for i in range(10):
167 58 d = defer.Deferred()
168 d.addCallback(lambda r: 'foo')
169 d.addErrback(lambda f: 'caught error')
170 59 did = self.pdm.save_pending_deferred(d)
171 d2 = self.pdm.get_pending_deferred(did,True)
172 d.errback(failure.Failure(MyError('bam')))
173 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
60 dDict[did] = d
61        # Make sure they are being saved
62 for k in dDict.keys():
63 self.assert_(self.pdm.quick_has_id(k))
64 # Get the pending deferred (block=True), then callback with 'foo' and compare
65 for did in dDict.keys()[0:5]:
66 d = self.pdm.get_pending_deferred(did,block=True)
67 dDict[did].callback('foo')
68 d.addCallback(lambda r: self.assert_(r=='foo'))
69 # Get the pending deferreds with (block=False) and make sure ResultNotCompleted is raised
70 for did in dDict.keys()[5:10]:
71 d = self.pdm.get_pending_deferred(did,block=False)
72 d.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
73 # Now callback the last 5, get them and compare.
74 for did in dDict.keys()[5:10]:
75 dDict[did].callback('foo')
76 d = self.pdm.get_pending_deferred(did,block=False)
77 d.addCallback(lambda r: self.assert_(r=='foo'))
78
79 def test_save_then_delete(self):
80 d = defer.Deferred()
81 did = self.pdm.save_pending_deferred(d)
82 self.assert_(self.pdm.quick_has_id(did))
83 self.pdm.delete_pending_deferred(did)
84 self.assert_(not self.pdm.quick_has_id(did))
85
86 def test_save_get_delete(self):
87 d = defer.Deferred()
88 did = self.pdm.save_pending_deferred(d)
89 d2 = self.pdm.get_pending_deferred(did,True)
90 d2.addErrback(lambda f: self.assertRaises(error.AbortedPendingDeferredError, f.raiseException))
91 self.pdm.delete_pending_deferred(did)
92 return d2
93
94 def test_double_get(self):
95 d = defer.Deferred()
96 did = self.pdm.save_pending_deferred(d)
97 d2 = self.pdm.get_pending_deferred(did,True)
98 d3 = self.pdm.get_pending_deferred(did,True)
99 d3.addErrback(lambda f: self.assertRaises(error.InvalidDeferredID, f.raiseException))
100
101 def test_get_after_callback(self):
102 d = defer.Deferred()
103 did = self.pdm.save_pending_deferred(d)
104 d.callback('foo')
105 d2 = self.pdm.get_pending_deferred(did,True)
106 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
107 self.assert_(not self.pdm.quick_has_id(did))
108
109 def test_get_before_callback(self):
110 d = defer.Deferred()
111 did = self.pdm.save_pending_deferred(d)
112 d2 = self.pdm.get_pending_deferred(did,True)
113 d.callback('foo')
114 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
115 self.assert_(not self.pdm.quick_has_id(did))
116 d = defer.Deferred()
117 did = self.pdm.save_pending_deferred(d)
118 d2 = self.pdm.get_pending_deferred(did,True)
119 d2.addCallback(lambda r: self.assertEquals(r,'foo'))
120 d.callback('foo')
121 self.assert_(not self.pdm.quick_has_id(did))
122
123 def test_get_after_errback(self):
124 class MyError(Exception):
125 pass
126 d = defer.Deferred()
127 did = self.pdm.save_pending_deferred(d)
128 d.errback(failure.Failure(MyError('foo')))
129 d2 = self.pdm.get_pending_deferred(did,True)
130 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
131 self.assert_(not self.pdm.quick_has_id(did))
132
133 def test_get_before_errback(self):
134 class MyError(Exception):
135 pass
136 d = defer.Deferred()
137 did = self.pdm.save_pending_deferred(d)
138 d2 = self.pdm.get_pending_deferred(did,True)
139 d.errback(failure.Failure(MyError('foo')))
140 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
141 self.assert_(not self.pdm.quick_has_id(did))
142 d = defer.Deferred()
143 did = self.pdm.save_pending_deferred(d)
144 d2 = self.pdm.get_pending_deferred(did,True)
145 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
146 d.errback(failure.Failure(MyError('foo')))
147 self.assert_(not self.pdm.quick_has_id(did))
174 148
175 def test_nested_deferreds(self):
176 d = defer.Deferred()
177 d2 = defer.Deferred()
178 d.addCallback(lambda r: d2)
179 did = self.pdm.save_pending_deferred(d)
180 d.callback('foo')
181 d3 = self.pdm.get_pending_deferred(did,False)
182 d3.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
183 d2.callback('bar')
184 d3 = self.pdm.get_pending_deferred(did,False)
185 d3.addCallback(lambda r: self.assertEquals(r,'bar'))
149 def test_noresult_noblock(self):
150 d = defer.Deferred()
151 did = self.pdm.save_pending_deferred(d)
152 d2 = self.pdm.get_pending_deferred(did,False)
153 d2.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
154
155 def test_with_callbacks(self):
156 d = defer.Deferred()
157 d.addCallback(lambda r: r+' foo')
158 d.addCallback(lambda r: r+' bar')
159 did = self.pdm.save_pending_deferred(d)
160 d2 = self.pdm.get_pending_deferred(did,True)
161 d.callback('bam')
162 d2.addCallback(lambda r: self.assertEquals(r,'bam foo bar'))
163
164 def test_with_errbacks(self):
165 class MyError(Exception):
166 pass
167 d = defer.Deferred()
168 d.addCallback(lambda r: 'foo')
169 d.addErrback(lambda f: 'caught error')
170 did = self.pdm.save_pending_deferred(d)
171 d2 = self.pdm.get_pending_deferred(did,True)
172 d.errback(failure.Failure(MyError('bam')))
173 d2.addErrback(lambda f: self.assertRaises(MyError, f.raiseException))
174
175 def test_nested_deferreds(self):
176 d = defer.Deferred()
177 d2 = defer.Deferred()
178 d.addCallback(lambda r: d2)
179 did = self.pdm.save_pending_deferred(d)
180 d.callback('foo')
181 d3 = self.pdm.get_pending_deferred(did,False)
182 d3.addErrback(lambda f: self.assertRaises(error.ResultNotCompleted, f.raiseException))
183 d2.callback('bar')
184 d3 = self.pdm.get_pending_deferred(did,False)
185 d3.addCallback(lambda r: self.assertEquals(r,'bar'))
186 186
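All of the tests above revolve around one small protocol: save_pending_deferred() stores a Deferred and returns an id, and get_pending_deferred(did, block) returns a new Deferred that either waits for the stored result (block=True) or errbacks with ResultNotCompleted if it has not fired yet (block=False). A minimal sketch of that flow, using only calls shown in the tests:

    # Minimal sketch of the PendingDeferredManager flow exercised above.
    # Assumes the guarded Twisted/IPython.kernel imports succeeded.
    from twisted.internet import defer
    import IPython.kernel.pendingdeferred as pd
    from IPython.kernel import error

    pdm = pd.PendingDeferredManager()

    d = defer.Deferred()
    did = pdm.save_pending_deferred(d)     # store the Deferred, get an id back

    # Asking for the result before it exists, without blocking, errbacks with
    # ResultNotCompleted; the caller is expected to come back later.
    early = pdm.get_pending_deferred(did, False)
    early.addErrback(lambda f: f.trap(error.ResultNotCompleted))

    d.callback('foo')                      # the underlying work finishes

    results = []
    late = pdm.get_pending_deferred(did, True)
    late.addCallback(results.append)       # results becomes ['foo']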
@@ -1,50 +1,51
1 1 # encoding: utf-8
2 2
3 3 """This file contains unittests for the kernel.task.py module."""
4 4
5 5 __docformat__ = "restructuredtext en"
6 6
7 7 #-------------------------------------------------------------------------------
8 8 # Copyright (C) 2008 The IPython Development Team
9 9 #
10 10 # Distributed under the terms of the BSD License. The full license is in
11 11 # the file COPYING, distributed as part of this software.
12 12 #-------------------------------------------------------------------------------
13 13
14 14 #-------------------------------------------------------------------------------
15 15 # Imports
16 16 #-------------------------------------------------------------------------------
17 17
18 18 try:
19 19 import time
20 20
21 21 from twisted.internet import defer
22 22 from twisted.trial import unittest
23 23
24 24 from IPython.kernel import task, controllerservice as cs, engineservice as es
25 25 from IPython.kernel.multiengine import IMultiEngine
26 26 from IPython.testing.util import DeferredTestCase
27 27 from IPython.kernel.tests.tasktest import ITaskControllerTestCase
28 28 except ImportError:
29 pass
30 else:
31 #-------------------------------------------------------------------------------
32 # Tests
33 #-------------------------------------------------------------------------------
29 import nose
30 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
34 31
35 class BasicTaskControllerTestCase(DeferredTestCase, ITaskControllerTestCase):
32 #-------------------------------------------------------------------------------
33 # Tests
34 #-------------------------------------------------------------------------------
35
36 class BasicTaskControllerTestCase(DeferredTestCase, ITaskControllerTestCase):
37
38 def setUp(self):
39 self.controller = cs.ControllerService()
40 self.controller.startService()
41 self.multiengine = IMultiEngine(self.controller)
42 self.tc = task.ITaskController(self.controller)
43 self.tc.failurePenalty = 0
44 self.engines=[]
36 45
37 def setUp(self):
38 self.controller = cs.ControllerService()
39 self.controller.startService()
40 self.multiengine = IMultiEngine(self.controller)
41 self.tc = task.ITaskController(self.controller)
42 self.tc.failurePenalty = 0
43 self.engines=[]
44
45 def tearDown(self):
46 self.controller.stopService()
47 for e in self.engines:
48 e.stopService()
46 def tearDown(self):
47 self.controller.stopService()
48 for e in self.engines:
49 e.stopService()
49 50
50 51
@@ -1,161 +1,162
1 1 #!/usr/bin/env python
2 2 # encoding: utf-8
3 3
4 4 __docformat__ = "restructuredtext en"
5 5
6 6 #-------------------------------------------------------------------------------
7 7 # Copyright (C) 2008 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-------------------------------------------------------------------------------
12 12
13 13 #-------------------------------------------------------------------------------
14 14 # Imports
15 15 #-------------------------------------------------------------------------------
16 16
17 17 try:
18 18 import time
19 19
20 20 from twisted.internet import defer, reactor
21 21
22 22 from IPython.kernel.fcutil import Tub, UnauthenticatedTub
23 23
24 24 from IPython.kernel import task as taskmodule
25 25 from IPython.kernel import controllerservice as cs
26 26 import IPython.kernel.multiengine as me
27 27 from IPython.testing.util import DeferredTestCase
28 28 from IPython.kernel.multienginefc import IFCSynchronousMultiEngine
29 29 from IPython.kernel.taskfc import IFCTaskController
30 30 from IPython.kernel.util import printer
31 31 from IPython.kernel.tests.tasktest import ITaskControllerTestCase
32 32 from IPython.kernel.clientconnector import ClientConnector
33 33 from IPython.kernel.error import CompositeError
34 34 from IPython.kernel.parallelfunction import ParallelFunction
35 35 except ImportError:
36 pass
37 else:
36 import nose
37 raise nose.SkipTest("This test requires zope.interface, Twisted and Foolscap")
38 38
39 #-------------------------------------------------------------------------------
40 # Tests
41 #-------------------------------------------------------------------------------
42 39
43 def _raise_it(f):
44 try:
45 f.raiseException()
46 except CompositeError, e:
47 e.raise_exception()
40 #-------------------------------------------------------------------------------
41 # Tests
42 #-------------------------------------------------------------------------------
48 43
49 class TaskTest(DeferredTestCase, ITaskControllerTestCase):
44 def _raise_it(f):
45 try:
46 f.raiseException()
47 except CompositeError, e:
48 e.raise_exception()
50 49
51 def setUp(self):
52
53 self.engines = []
54
55 self.controller = cs.ControllerService()
56 self.controller.startService()
57 self.imultiengine = me.IMultiEngine(self.controller)
58 self.itc = taskmodule.ITaskController(self.controller)
59 self.itc.failurePenalty = 0
60
61 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
62 self.tc_referenceable = IFCTaskController(self.itc)
63
64 self.controller_tub = Tub()
65 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
66 self.controller_tub.setLocation('127.0.0.1:10105')
67
68 mec_furl = self.controller_tub.registerReference(self.mec_referenceable)
69 tc_furl = self.controller_tub.registerReference(self.tc_referenceable)
70 self.controller_tub.startService()
71
72 self.client_tub = ClientConnector()
73 d = self.client_tub.get_multiengine_client(mec_furl)
74 d.addCallback(self.handle_mec_client)
75 d.addCallback(lambda _: self.client_tub.get_task_client(tc_furl))
76 d.addCallback(self.handle_tc_client)
77 return d
78
79 def handle_mec_client(self, client):
80 self.multiengine = client
50 class TaskTest(DeferredTestCase, ITaskControllerTestCase):
51
52 def setUp(self):
53
54 self.engines = []
55
56 self.controller = cs.ControllerService()
57 self.controller.startService()
58 self.imultiengine = me.IMultiEngine(self.controller)
59 self.itc = taskmodule.ITaskController(self.controller)
60 self.itc.failurePenalty = 0
61
62 self.mec_referenceable = IFCSynchronousMultiEngine(self.imultiengine)
63 self.tc_referenceable = IFCTaskController(self.itc)
64
65 self.controller_tub = Tub()
66 self.controller_tub.listenOn('tcp:10105:interface=127.0.0.1')
67 self.controller_tub.setLocation('127.0.0.1:10105')
68
69 mec_furl = self.controller_tub.registerReference(self.mec_referenceable)
70 tc_furl = self.controller_tub.registerReference(self.tc_referenceable)
71 self.controller_tub.startService()
72
73 self.client_tub = ClientConnector()
74 d = self.client_tub.get_multiengine_client(mec_furl)
75 d.addCallback(self.handle_mec_client)
76 d.addCallback(lambda _: self.client_tub.get_task_client(tc_furl))
77 d.addCallback(self.handle_tc_client)
78 return d
79
80 def handle_mec_client(self, client):
81 self.multiengine = client
82
83 def handle_tc_client(self, client):
84 self.tc = client
85
86 def tearDown(self):
87 dlist = []
88 # Shut down the multiengine client
89 d = self.client_tub.tub.stopService()
90 dlist.append(d)
91 # Shut down the engines
92 for e in self.engines:
93 e.stopService()
94 # Shut down the controller
95 d = self.controller_tub.stopService()
96 d.addBoth(lambda _: self.controller.stopService())
97 dlist.append(d)
98 return defer.DeferredList(dlist)
99
100 def test_mapper(self):
101 self.addEngine(1)
102 m = self.tc.mapper()
103 self.assertEquals(m.task_controller,self.tc)
104 self.assertEquals(m.clear_before,False)
105 self.assertEquals(m.clear_after,False)
106 self.assertEquals(m.retries,0)
107 self.assertEquals(m.recovery_task,None)
108 self.assertEquals(m.depend,None)
109 self.assertEquals(m.block,True)
110
111 def test_map_default(self):
112 self.addEngine(1)
113 m = self.tc.mapper()
114 d = m.map(lambda x: 2*x, range(10))
115 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
116 d.addCallback(lambda _: self.tc.map(lambda x: 2*x, range(10)))
117 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
118 return d
119
120 def test_map_noblock(self):
121 self.addEngine(1)
122 m = self.tc.mapper(block=False)
123 d = m.map(lambda x: 2*x, range(10))
124 d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)]))
125 return d
126
127 def test_mapper_fail(self):
128 self.addEngine(1)
129 m = self.tc.mapper()
130 d = m.map(lambda x: 1/0, range(10))
131 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
132 return d
133
134 def test_parallel(self):
135 self.addEngine(1)
136 p = self.tc.parallel()
137 self.assert_(isinstance(p, ParallelFunction))
138 @p
139 def f(x): return 2*x
140 d = f(range(10))
141 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
142 return d
81 143
82 def handle_tc_client(self, client):
83 self.tc = client
144 def test_parallel_noblock(self):
145 self.addEngine(1)
146 p = self.tc.parallel(block=False)
147 self.assert_(isinstance(p, ParallelFunction))
148 @p
149 def f(x): return 2*x
150 d = f(range(10))
151 d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)]))
152 return d
84 153
85 def tearDown(self):
86 dlist = []
87 # Shut down the multiengine client
88 d = self.client_tub.tub.stopService()
89 dlist.append(d)
90 # Shut down the engines
91 for e in self.engines:
92 e.stopService()
93 # Shut down the controller
94 d = self.controller_tub.stopService()
95 d.addBoth(lambda _: self.controller.stopService())
96 dlist.append(d)
97 return defer.DeferredList(dlist)
98
99 def test_mapper(self):
100 self.addEngine(1)
101 m = self.tc.mapper()
102 self.assertEquals(m.task_controller,self.tc)
103 self.assertEquals(m.clear_before,False)
104 self.assertEquals(m.clear_after,False)
105 self.assertEquals(m.retries,0)
106 self.assertEquals(m.recovery_task,None)
107 self.assertEquals(m.depend,None)
108 self.assertEquals(m.block,True)
109
110 def test_map_default(self):
111 self.addEngine(1)
112 m = self.tc.mapper()
113 d = m.map(lambda x: 2*x, range(10))
114 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
115 d.addCallback(lambda _: self.tc.map(lambda x: 2*x, range(10)))
116 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
117 return d
118
119 def test_map_noblock(self):
120 self.addEngine(1)
121 m = self.tc.mapper(block=False)
122 d = m.map(lambda x: 2*x, range(10))
123 d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)]))
124 return d
125
126 def test_mapper_fail(self):
127 self.addEngine(1)
128 m = self.tc.mapper()
129 d = m.map(lambda x: 1/0, range(10))
130 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
131 return d
132
133 def test_parallel(self):
134 self.addEngine(1)
135 p = self.tc.parallel()
136 self.assert_(isinstance(p, ParallelFunction))
137 @p
138 def f(x): return 2*x
139 d = f(range(10))
140 d.addCallback(lambda r: self.assertEquals(r,[2*x for x in range(10)]))
141 return d
142
143 def test_parallel_noblock(self):
144 self.addEngine(1)
145 p = self.tc.parallel(block=False)
146 self.assert_(isinstance(p, ParallelFunction))
147 @p
148 def f(x): return 2*x
149 d = f(range(10))
150 d.addCallback(lambda r: self.assertEquals(r,[x for x in range(10)]))
151 return d
152
153 def test_parallel_fail(self):
154 self.addEngine(1)
155 p = self.tc.parallel()
156 self.assert_(isinstance(p, ParallelFunction))
157 @p
158 def f(x): return 1/0
159 d = f(range(10))
160 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
161 return d No newline at end of file
154 def test_parallel_fail(self):
155 self.addEngine(1)
156 p = self.tc.parallel()
157 self.assert_(isinstance(p, ParallelFunction))
158 @p
159 def f(x): return 1/0
160 d = f(range(10))
161 d.addBoth(lambda f: self.assertRaises(ZeroDivisionError, _raise_it, f))
162 return d No newline at end of file
@@ -1,135 +1,160
1 1 """Decorators for labeling test objects.
2 2
3 3 Decorators that merely return a modified version of the original
4 4 function object are straightforward. Decorators that return a new
5 5 function object need to use
6 6 nose.tools.make_decorator(original_function)(decorator) in returning
7 7 the decorator, in order to preserve metadata such as function name,
8 8 setup and teardown functions and so on - see nose.tools for more
9 9 information.
10 10
11 This module provides a set of useful decorators meant to be ready to use in
12 your own tests. See the bottom of the file for the ready-made ones, and if you
13 find yourself writing a new one that may be of generic use, add it here.
14
11 15 NOTE: This file contains IPython-specific decorators and imports the
12 16 numpy.testing.decorators file, which we've copied verbatim. Any of our own
13 17 code will be added at the bottom if we end up extending this.
14 18 """
15 19
16 20 # Stdlib imports
17 21 import inspect
22 import sys
18 23
19 24 # Third-party imports
20 25
21 26 # This is Michele Simionato's decorator module, also kept verbatim.
22 27 from decorator_msim import decorator, update_wrapper
23 28
24 29 # Grab the numpy-specific decorators which we keep in a file that we
25 30 # occasionally update from upstream: decorators_numpy.py is an IDENTICAL copy
26 31 # of numpy.testing.decorators.
27 32 from decorators_numpy import *
28 33
29 34 ##############################################################################
30 35 # Local code begins
31 36
32 37 # Utility functions
33 38
34 39 def apply_wrapper(wrapper,func):
35 40 """Apply a wrapper to a function for decoration.
36 41
37 42 This mixes Michele Simionato's decorator tool with nose's make_decorator,
38 43 to apply a wrapper in a decorator so that all nose attributes, as well as
39 44 function signature and other properties, survive the decoration cleanly.
40 45 This will ensure that wrapped functions can still be well introspected via
41 46 IPython, for example.
42 47 """
43 48 import nose.tools
44 49
45 50 return decorator(wrapper,nose.tools.make_decorator(func)(wrapper))
46 51
47 52
48 53 def make_label_dec(label,ds=None):
49 54 """Factory function to create a decorator that applies one or more labels.
50 55
51 56 :Parameters:
52 57 label : string or sequence
53 58 One or more labels that will be applied by the decorator to the functions
54 59 it decorates. Labels are attributes of the decorated function with their
55 60 value set to True.
56 61
57 62 :Keywords:
58 63 ds : string
59 64 An optional docstring for the resulting decorator. If not given, a
60 65 default docstring is auto-generated.
61 66
62 67 :Returns:
63 68 A decorator.
64 69
65 70 :Examples:
66 71
67 72 A simple labeling decorator:
68 73 >>> slow = make_label_dec('slow')
69 74 >>> print slow.__doc__
70 75 Labels a test as 'slow'.
71 76
72 77 And one that uses multiple labels and a custom docstring:
73 78 >>> rare = make_label_dec(['slow','hard'],
74 79 ... "Mix labels 'slow' and 'hard' for rare tests.")
75 80 >>> print rare.__doc__
76 81 Mix labels 'slow' and 'hard' for rare tests.
77 82
78 83 Now, let's test using this one:
79 84 >>> @rare
80 85 ... def f(): pass
81 86 ...
82 87 >>>
83 88 >>> f.slow
84 89 True
85 90 >>> f.hard
86 91 True
87 92 """
88 93
89 94 if isinstance(label,basestring):
90 95 labels = [label]
91 96 else:
92 97 labels = label
93 98
94 99 # Validate that the given label(s) are OK for use in setattr() by doing a
95 100 # dry run on a dummy function.
96 101 tmp = lambda : None
97 102 for label in labels:
98 103 setattr(tmp,label,True)
99 104
100 105 # This is the actual decorator we'll return
101 106 def decor(f):
102 107 for label in labels:
103 108 setattr(f,label,True)
104 109 return f
105 110
106 111 # Apply the user's docstring, or autogenerate a basic one
107 112 if ds is None:
108 113 ds = "Labels a test as %r." % label
109 114 decor.__doc__ = ds
110 115
111 116 return decor
112 117
113 118 #-----------------------------------------------------------------------------
114 119 # Decorators for public use
115 120
116 121 skip_doctest = make_label_dec('skip_doctest',
117 122 """Decorator - mark a function or method for skipping its doctest.
118 123
119 124 This decorator allows you to mark a function whose docstring you wish to
120 125 omit from testing, while preserving the docstring for introspection, help,
121 126 etc.""")
122 127
128 def skip(msg=''):
129 """Decorator - mark a test function for skipping from test suite.
130
131 This is a decorator factory, like make_label_dec: call it (optionally
132 with a message) to obtain the actual decorator to apply to a test.
133
134 :Parameters:
135
136 func : function
137 Test function to be skipped (the argument of the returned decorator)
123 138
124 def skip(func):
125 """Decorator - mark a test function for skipping from test suite."""
139 msg : string
140 Optional message to be added.
141 """
126 142
127 143 import nose
128
129 def wrapper(*a,**k):
130 raise nose.SkipTest("Skipping test for function: %s" %
131 func.__name__)
132
133 return apply_wrapper(wrapper,func)
134 144
145 def inner(func):
146
147 def wrapper(*a,**k):
148 if msg: out = '\n'+msg
149 else: out = ''
150 raise nose.SkipTest("Skipping test for function: %s%s" %
151 (func.__name__,out))
152
153 return apply_wrapper(wrapper,func)
154
155 return inner
135 156
157 # Decorators to skip certain tests on specific platforms.
158 skip_win32 = skipif(sys.platform=='win32',"This test does not run under Windows")
159 skip_linux = skipif(sys.platform=='linux2',"This test does not run under Linux")
160 skip_osx = skipif(sys.platform=='darwin',"This test does not run under OSX")
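A minimal usage sketch, assuming only the decorators defined in this file (the test names are invented). With the factory form above, skip() is called, optionally with a message, while the skipif()-based platform guards are applied directly:

    from IPython.testing import decorators as dec

    @dec.skip('requires network access')
    def test_needs_network():
        pass   # nose reports this as skipped, with the message appended

    @dec.skip_win32
    def test_posix_only():
        assert True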
1 NO CONTENT: modified file chmod 100755 => 100644
@@ -1,75 +1,74
1 1 # Set this prefix to where you want to install the plugin
2 PREFIX=~/usr/local
3 PREFIX=~/tmp/local
2 PREFIX=/usr/local
4 3
5 4 NOSE0=nosetests -vs --with-doctest --doctest-tests --detailed-errors
6 5 NOSE=nosetests -vvs --with-ipdoctest --doctest-tests --doctest-extension=txt \
7 6 --detailed-errors
8 7
9 8 SRC=ipdoctest.py setup.py ../decorators.py
10 9
11 10 # Default target for clean 'make'
12 11 default: iplib
13 12
14 13 # The actual plugin installation
15 14 plugin: IPython_doctest_plugin.egg-info
16 15
17 16 # Simple targets that test one thing
18 17 simple: plugin simple.py
19 18 $(NOSE) simple.py
20 19
21 20 dtest: plugin dtexample.py
22 21 $(NOSE) dtexample.py
23 22
24 23 rtest: plugin test_refs.py
25 24 $(NOSE) test_refs.py
26 25
27 26 test: plugin dtexample.py
28 27 $(NOSE) dtexample.py test*.py test*.txt
29 28
30 29 deb: plugin dtexample.py
31 30 $(NOSE) test_combo.txt
32 31
33 32 # IPython tests
34 33 deco:
35 34 $(NOSE0) IPython.testing.decorators
36 35
37 36 magic: plugin
38 37 $(NOSE) IPython.Magic
39 38
40 39 ipipe: plugin
41 40 $(NOSE) IPython.Extensions.ipipe
42 41
43 42 iplib: plugin
44 43 $(NOSE) IPython.iplib
45 44
46 45 strd: plugin
47 46 $(NOSE) IPython.strdispatch
48 47
49 48 engine: plugin
50 49 $(NOSE) IPython.kernel
51 50
52 51 tf: plugin
53 52 $(NOSE) IPython.config.traitlets
54 53
55 54 # All of ipython itself
56 55 ipython: plugin
57 56 $(NOSE) IPython
58 57
59 58
60 59 # Combined targets
61 60 sr: rtest strd
62 61
63 62 base: dtest rtest test strd deco
64 63
65 64 quick: base iplib ipipe
66 65
67 66 all: base ipython
68 67
69 68 # Main plugin and cleanup
70 69 IPython_doctest_plugin.egg-info: $(SRC)
71 70 python setup.py install --prefix=$(PREFIX)
72 71 touch $@
73 72
74 73 clean:
75 74 rm -rf IPython_doctest_plugin.egg-info *~ *pyc build/ dist/
@@ -1,784 +1,806
1 1 """Nose Plugin that supports IPython doctests.
2 2
3 3 Limitations:
4 4
5 5 - When generating examples for use as doctests, make sure that you have
6 6 pretty-printing OFF. This can be done either by starting ipython with the
7 7 flag '--nopprint', by setting pprint to 0 in your ipythonrc file, or by
8 8 interactively disabling it with %Pprint. This is required so that IPython
9 9 output matches that of normal Python, which is used by doctest for internal
10 10 execution.
11 11
12 12 - Do not rely on specific prompt numbers for results (such as
13 13 '_34==True'). For IPython tests run via an external process the
14 14 prompt numbers may be different, and IPython tests run as normal python code
15 15 won't even have these special _NN variables set at all.
16 16 """
17 17
18 18
19 19 #-----------------------------------------------------------------------------
20 20 # Module imports
21 21
22 22 # From the standard library
23 23 import __builtin__
24 24 import commands
25 25 import doctest
26 26 import inspect
27 27 import logging
28 28 import os
29 29 import re
30 30 import sys
31 31 import traceback
32 32 import unittest
33 33
34 34 from inspect import getmodule
35 35 from StringIO import StringIO
36 36
37 37 # We are overriding the default doctest runner, so we need to import a few
38 38 # things from doctest directly
39 39 from doctest import (REPORTING_FLAGS, REPORT_ONLY_FIRST_FAILURE,
40 40 _unittest_reportflags, DocTestRunner,
41 41 _extract_future_flags, pdb, _OutputRedirectingPdb,
42 42 _exception_traceback,
43 43 linecache)
44 44
45 45 # Third-party modules
46 46 import nose.core
47 47
48 48 from nose.plugins import doctests, Plugin
49 49 from nose.util import anyp, getpackage, test_address, resolve_name, tolist
50 50
51 51 #-----------------------------------------------------------------------------
52 52 # Module globals and other constants
53 53
54 54 log = logging.getLogger(__name__)
55 55
56 56 ###########################################################################
57 57 # *** HACK ***
58 58 # We must start our own ipython object and heavily muck with it so that all the
59 59 # modifications IPython makes to system behavior don't send the doctest
60 60 # machinery into a fit. This code should be considered a gross hack, but it
61 61 # gets the job done.
62 62
63 63
64 64 # Hack to modify the %run command so we can sync the user's namespace with the
65 65 # test globals. Once we move over to a clean magic system, this will be done
66 66 # with much less ugliness.
67 67
68 68 def _run_ns_sync(self,arg_s,runner=None):
69 69 """Modified version of %run that syncs testing namespaces.
70 70
71 71 This is strictly needed for running doctests that call %run.
72 72 """
73 73
74 74 out = _ip.IP.magic_run_ori(arg_s,runner)
75 75 _run_ns_sync.test_globs.update(_ip.user_ns)
76 76 return out
77 77
78 78
79 79 class ipnsdict(dict):
80 80 """A special subclass of dict for use as an IPython namespace in doctests.
81 81
82 82 This subclass adds a simple checkpointing capability so that when testing
83 83 machinery clears it (we use it as the test execution context), it doesn't
84 84 get completely destroyed.
85 85 """
86 86
87 87 def __init__(self,*a):
88 88 dict.__init__(self,*a)
89 89 self._savedict = {}
90 90
91 91 def clear(self):
92 92 dict.clear(self)
93 93 self.update(self._savedict)
94 94
95 95 def _checkpoint(self):
96 96 self._savedict.clear()
97 97 self._savedict.update(self)
98 98
99 99 def update(self,other):
100 100 self._checkpoint()
101 101 dict.update(self,other)
102 102 # If '_' is in the namespace, python won't set it when executing code,
103 103 # and we have examples that test it. So we ensure that the namespace
104 104 # is always 'clean' of it before it's used for test code execution.
105 105 self.pop('_',None)
106 106
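A small sketch of the behaviour described in the comments above, using the class exactly as defined here: every update() strips '_', so examples that test '_' always start clean.

    ns = ipnsdict()
    ns.update({'x': 1, '_': 'stale result'})
    assert ns['x'] == 1
    assert '_' not in ns   # '_' is dropped on every update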
107 107
108 108 def start_ipython():
109 109 """Start a global IPython shell, which we need for IPython-specific syntax.
110 110 """
111 111 import new
112 112
113 113 import IPython
114 114
115 115 def xsys(cmd):
116 116 """Execute a command and print its output.
117 117
118 118 This is just a convenience function to replace the IPython system call
119 119 with one that is more doctest-friendly.
120 120 """
121 121 cmd = _ip.IP.var_expand(cmd,depth=1)
122 122 sys.stdout.write(commands.getoutput(cmd))
123 123 sys.stdout.flush()
124 124
125 125 # Store certain global objects that IPython modifies
126 126 _displayhook = sys.displayhook
127 127 _excepthook = sys.excepthook
128 128 _main = sys.modules.get('__main__')
129 129
130 130 # Start IPython instance. We customize it to start with minimal frills.
131 131 user_ns,global_ns = IPython.ipapi.make_user_namespaces(ipnsdict(),dict())
132 132
133 133 IPython.Shell.IPShell(['--classic','--noterm_title'],
134 134 user_ns,global_ns)
135 135
136 136 # Deactivate the various python system hooks added by ipython for
137 137 # interactive convenience so we don't confuse the doctest system
138 138 sys.modules['__main__'] = _main
139 139 sys.displayhook = _displayhook
140 140 sys.excepthook = _excepthook
141 141
142 142 # So that ipython magics and aliases can be doctested (they work by making
143 143 # a call into a global _ip object)
144 144 _ip = IPython.ipapi.get()
145 145 __builtin__._ip = _ip
146 146
147 147 # Modify the IPython system call with one that uses getoutput, so that we
148 148 # can capture subcommands and print them to Python's stdout, otherwise the
149 149 # doctest machinery would miss them.
150 150 _ip.system = xsys
151 151
152 152 # Also patch our %run function in.
153 153 im = new.instancemethod(_run_ns_sync,_ip.IP, _ip.IP.__class__)
154 154 _ip.IP.magic_run_ori = _ip.IP.magic_run
155 155 _ip.IP.magic_run = im
156 156
157 157 # The start call MUST be made here. I'm not sure yet why it doesn't work if
158 158 # it is made later, at plugin initialization time, but in all my tests, that's
159 159 # the case.
160 160 start_ipython()
161 161
162 162 # *** END HACK ***
163 163 ###########################################################################
164 164
165 165 # Classes and functions
166 166
167 167 def is_extension_module(filename):
168 168 """Return whether the given filename is an extension module.
169 169
170 170 This simply checks that the extension is either .so or .pyd.
171 171 """
172 172 return os.path.splitext(filename)[1].lower() in ('.so','.pyd')
173 173
174 174
175 175 class nodoc(object):
176 176 def __init__(self,obj):
177 177 self.obj = obj
178 178
179 179 def __getattribute__(self,key):
180 180 if key == '__doc__':
181 181 return None
182 182 else:
183 183 return getattr(object.__getattribute__(self,'obj'),key)
184 184
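A quick sketch of what the nodoc wrapper does (the wrapped function is invented): only __doc__ is suppressed; every other attribute is proxied through to the wrapped object.

    def documented():
        """A docstring that should be hidden from the doctest finder."""
        return 1

    wrapped = nodoc(documented)
    assert wrapped.__doc__ is None              # docstring hidden
    assert wrapped.__name__ == 'documented'     # other attributes pass through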
185 185 # Modified version of the one in the stdlib, that fixes a python bug (doctests
186 186 # not found in extension modules, http://bugs.python.org/issue3158)
187 187 class DocTestFinder(doctest.DocTestFinder):
188 188
189 189 def _from_module(self, module, object):
190 190 """
191 191 Return true if the given object is defined in the given
192 192 module.
193 193 """
194 194 if module is None:
195 195 return True
196 196 elif inspect.isfunction(object):
197 197 return module.__dict__ is object.func_globals
198 198 elif inspect.isbuiltin(object):
199 199 return module.__name__ == object.__module__
200 200 elif inspect.isclass(object):
201 201 return module.__name__ == object.__module__
202 202 elif inspect.ismethod(object):
203 203 # This one may be a bug in cython that fails to correctly set the
204 204 # __module__ attribute of methods, but since the same error is easy
205 205 # to make by extension code writers, having this safety in place
206 206 # isn't such a bad idea
207 207 return module.__name__ == object.im_class.__module__
208 208 elif inspect.getmodule(object) is not None:
209 209 return module is inspect.getmodule(object)
210 210 elif hasattr(object, '__module__'):
211 211 return module.__name__ == object.__module__
212 212 elif isinstance(object, property):
213 213 return True # [XX] no way to be sure.
214 214 else:
215 215 raise ValueError("object must be a class or function")
216 216
217 217 def _find(self, tests, obj, name, module, source_lines, globs, seen):
218 218 """
219 219 Find tests for the given object and any contained objects, and
220 220 add them to `tests`.
221 221 """
222 222
223 223 if hasattr(obj,"skip_doctest"):
224 224 #print 'SKIPPING DOCTEST FOR:',obj # dbg
225 225 obj = nodoc(obj)
226 226
227 227 doctest.DocTestFinder._find(self,tests, obj, name, module,
228 228 source_lines, globs, seen)
229 229
230 230 # Below we re-run pieces of the above method with manual modifications,
231 231 # because the original code is buggy and fails to correctly identify
232 232 # doctests in extension modules.
233 233
234 234 # Local shorthands
235 235 from inspect import isroutine, isclass, ismodule
236 236
237 237 # Look for tests in a module's contained objects.
238 238 if inspect.ismodule(obj) and self._recurse:
239 239 for valname, val in obj.__dict__.items():
240 240 valname1 = '%s.%s' % (name, valname)
241 241 if ( (isroutine(val) or isclass(val))
242 242 and self._from_module(module, val) ):
243 243
244 244 self._find(tests, val, valname1, module, source_lines,
245 245 globs, seen)
246 246
247 247 # Look for tests in a class's contained objects.
248 248 if inspect.isclass(obj) and self._recurse:
249 249 #print 'RECURSE into class:',obj # dbg
250 250 for valname, val in obj.__dict__.items():
251 251 # Special handling for staticmethod/classmethod.
252 252 if isinstance(val, staticmethod):
253 253 val = getattr(obj, valname)
254 254 if isinstance(val, classmethod):
255 255 val = getattr(obj, valname).im_func
256 256
257 257 # Recurse to methods, properties, and nested classes.
258 258 if ((inspect.isfunction(val) or inspect.isclass(val) or
259 259 inspect.ismethod(val) or
260 260 isinstance(val, property)) and
261 261 self._from_module(module, val)):
262 262 valname = '%s.%s' % (name, valname)
263 263 self._find(tests, val, valname, module, source_lines,
264 264 globs, seen)
265 265
266 266
267 267 class IPDoctestOutputChecker(doctest.OutputChecker):
268 268 """Second-chance checker with support for random tests.
269 269
270 270 If the default comparison doesn't pass, this checker looks in the expected
271 271 output string for flags that tell us to ignore the output.
272 272 """
273 273
274 274 random_re = re.compile(r'#\s*random\s+')
275 275
276 276 def check_output(self, want, got, optionflags):
277 277 """Check output, accepting special markers embedded in the output.
278 278
279 279 If the output didn't pass the default validation but the special string
280 280 '#random' is included, we accept it."""
281 281
282 282 # Let the original tester verify first, in case people have valid tests
283 283 # that happen to have a comment saying '#random' embedded in them.
284 284 ret = doctest.OutputChecker.check_output(self, want, got,
285 285 optionflags)
286 286 if not ret and self.random_re.search(want):
287 287 #print >> sys.stderr, 'RANDOM OK:',want # dbg
288 288 return True
289 289
290 290 return ret
291 291
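A sketch of the kind of doctest this checker accepts when running under the plugin (the function is invented): the '#random' marker lives in the expected output, so the example still executes but its output is not compared strictly.

    def current_pid():
        """Return this process' id.

        >>> current_pid()
        12345  # random - the actual pid changes on every run
        """
        import os
        return os.getpid()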
292 292
293 293 class DocTestCase(doctests.DocTestCase):
294 294 """Proxy for DocTestCase: provides an address() method that
295 295 returns the correct address for the doctest case. Otherwise
296 296 acts as a proxy to the test case. To provide hints for address(),
297 297 an obj may also be passed -- this will be used as the test object
298 298 for purposes of determining the test address, if it is provided.
299 299 """
300 300
301 301 # Note: this method was taken from numpy's nosetester module.
302 302
303 303 # Subclass nose.plugins.doctests.DocTestCase to work around a bug in
304 304 # its constructor that blocks non-default arguments from being passed
305 305 # down into doctest.DocTestCase
306 306
307 307 def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
308 308 checker=None, obj=None, result_var='_'):
309 309 self._result_var = result_var
310 310 doctests.DocTestCase.__init__(self, test,
311 311 optionflags=optionflags,
312 312 setUp=setUp, tearDown=tearDown,
313 313 checker=checker)
314 314 # Now we must actually copy the original constructor from the stdlib
315 315 # doctest class, because we can't call it directly and a bug in nose
316 316 # means it never gets passed the right arguments.
317 317
318 318 self._dt_optionflags = optionflags
319 319 self._dt_checker = checker
320 320 self._dt_test = test
321 321 self._dt_setUp = setUp
322 322 self._dt_tearDown = tearDown
323 323
324 324 # XXX - store this runner once in the object!
325 325 runner = IPDocTestRunner(optionflags=optionflags,
326 326 checker=checker, verbose=False)
327 327 self._dt_runner = runner
328 328
329 329
330 330 # Each doctest should remember what directory it was loaded from...
331 331 self._ori_dir = os.getcwd()
332 332
333 333 # Modified runTest from the default stdlib
334 334 def runTest(self):
335 335 test = self._dt_test
336 336 runner = self._dt_runner
337 337
338 338 old = sys.stdout
339 339 new = StringIO()
340 340 optionflags = self._dt_optionflags
341 341
342 342 if not (optionflags & REPORTING_FLAGS):
343 343 # The option flags don't include any reporting flags,
344 344 # so add the default reporting flags
345 345 optionflags |= _unittest_reportflags
346 346
347 347 try:
348 348 # Save our current directory and switch out to the one where the
349 349 # test was originally created, in case another doctest did a
350 350 # directory change. We'll restore this in the finally clause.
351 351 curdir = os.getcwd()
352 352 os.chdir(self._ori_dir)
353 353
354 354 runner.DIVIDER = "-"*70
355 355 failures, tries = runner.run(test,out=new.write,
356 356 clear_globs=False)
357 357 finally:
358 358 sys.stdout = old
359 359 os.chdir(curdir)
360 360
361 361 if failures:
362 362 raise self.failureException(self.format_failure(new.getvalue()))
363 363
364 364 def setUp(self):
365 365 """Modified test setup that syncs with ipython namespace"""
366 366
367 367 if isinstance(self._dt_test.examples[0],IPExample):
368 368 # for IPython examples *only*, we swap the globals with the ipython
369 369 # namespace, after updating it with the globals (which doctest
370 370 # fills with the necessary info from the module being tested).
371 371 _ip.IP.user_ns.update(self._dt_test.globs)
372 372 self._dt_test.globs = _ip.IP.user_ns
373 373
374 374 doctests.DocTestCase.setUp(self)
375 375
376 376
377 377
378 378 # A simple subclassing of the original with a different class name, so we can
379 379 # distinguish and treat differently IPython examples from pure python ones.
380 380 class IPExample(doctest.Example): pass
381 381
382 382
383 383 class IPExternalExample(doctest.Example):
384 384 """Doctest examples to be run in an external process."""
385 385
386 386 def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
387 387 options=None):
388 388 # Parent constructor
389 389 doctest.Example.__init__(self,source,want,exc_msg,lineno,indent,options)
390 390
391 391 # An EXTRA newline is needed to prevent pexpect hangs
392 392 self.source += '\n'
393 393
394 394
395 395 class IPDocTestParser(doctest.DocTestParser):
396 396 """
397 397 A class used to parse strings containing doctest examples.
398 398
399 399 Note: This is a version modified to properly recognize IPython input and
400 400 convert any IPython examples into valid Python ones.
401 401 """
402 402 # This regular expression is used to find doctest examples in a
403 403 # string. It defines three groups: `source` is the source code
404 404 # (including leading indentation and prompts); `indent` is the
405 405 # indentation of the first (PS1) line of the source code; and
406 406 # `want` is the expected output (including leading indentation).
407 407
408 408 # Classic Python prompts or default IPython ones
409 409 _PS1_PY = r'>>>'
410 410 _PS2_PY = r'\.\.\.'
411 411
412 412 _PS1_IP = r'In\ \[\d+\]:'
413 413 _PS2_IP = r'\ \ \ \.\.\.+:'
414 414
415 415 _RE_TPL = r'''
416 416 # Source consists of a PS1 line followed by zero or more PS2 lines.
417 417 (?P<source>
418 418 (?:^(?P<indent> [ ]*) (?P<ps1> %s) .*) # PS1 line
419 419 (?:\n [ ]* (?P<ps2> %s) .*)*) # PS2 lines
420 420 \n? # a newline
421 421 # Want consists of any non-blank lines that do not start with PS1.
422 422 (?P<want> (?:(?![ ]*$) # Not a blank line
423 423 (?![ ]*%s) # Not a line starting with PS1
424 424 (?![ ]*%s) # Not a line starting with PS2
425 425 .*$\n? # But any other line
426 426 )*)
427 427 '''
428 428
429 429 _EXAMPLE_RE_PY = re.compile( _RE_TPL % (_PS1_PY,_PS2_PY,_PS1_PY,_PS2_PY),
430 430 re.MULTILINE | re.VERBOSE)
431 431
432 432 _EXAMPLE_RE_IP = re.compile( _RE_TPL % (_PS1_IP,_PS2_IP,_PS1_IP,_PS2_IP),
433 433 re.MULTILINE | re.VERBOSE)
434 434
435 435 # Mark a test as being fully random. In this case, we simply append the
436 436 # random marker ('#random') to each individual example's output. This way
437 437 # we don't need to modify any other code.
438 438 _RANDOM_TEST = re.compile(r'#\s*all-random\s+')
439 439
440 440 # Mark tests to be executed in an external process - currently unsupported.
441 441 _EXTERNAL_IP = re.compile(r'#\s*ipdoctest:\s*EXTERNAL')
442 442
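An illustrative check of the prompt patterns defined above, reusing the _PS1_IP/_PS2_IP strings verbatim; these are the input shapes the parser recognizes before converting them with ip2py():

    import re
    assert re.match(r'In\ \[\d+\]:', 'In [12]: x = 1')
    assert re.match(r'\ \ \ \.\.\.+:', '   ...: print x')
    assert not re.match(r'In\ \[\d+\]:', '>>> x = 1')   # plain Python prompt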
443 443 def ip2py(self,source):
444 444 """Convert input IPython source into valid Python."""
445 445 out = []
446 446 newline = out.append
447 447 for lnum,line in enumerate(source.splitlines()):
448 448 newline(_ip.IP.prefilter(line,lnum>0))
449 449 newline('') # ensure a closing newline, needed by doctest
450 450 #print "PYSRC:", '\n'.join(out) # dbg
451 451 return '\n'.join(out)
452 452
453 453 def parse(self, string, name='<string>'):
454 454 """
455 455 Divide the given string into examples and intervening text,
456 456 and return them as a list of alternating Examples and strings.
457 457 Line numbers for the Examples are 0-based. The optional
458 458 argument `name` is a name identifying this string, and is only
459 459 used for error messages.
460 460 """
461 461
462 462 #print 'Parse string:\n',string # dbg
463 463
464 464 string = string.expandtabs()
465 465 # If all lines begin with the same indentation, then strip it.
466 466 min_indent = self._min_indent(string)
467 467 if min_indent > 0:
468 468 string = '\n'.join([l[min_indent:] for l in string.split('\n')])
469 469
470 470 output = []
471 471 charno, lineno = 0, 0
472 472
473 473 # We make 'all random' tests by adding the '# random' mark to every
474 474 # block of output in the test.
475 475 if self._RANDOM_TEST.search(string):
476 476 random_marker = '\n# random'
477 477 else:
478 478 random_marker = ''
479 479
480 480 # Whether to convert the input from ipython to python syntax
481 481 ip2py = False
482 482 # Find all doctest examples in the string. First, try them as Python
483 483 # examples, then as IPython ones
484 484 terms = list(self._EXAMPLE_RE_PY.finditer(string))
485 485 if terms:
486 486 # Normal Python example
487 487 #print '-'*70 # dbg
488 488 #print 'PyExample, Source:\n',string # dbg
489 489 #print '-'*70 # dbg
490 490 Example = doctest.Example
491 491 else:
492 492 # It's an ipython example. Note that IPExamples are run
493 493 # in-process, so their syntax must be turned into valid python.
494 494 # IPExternalExamples are run out-of-process (via pexpect) so they
495 495 # don't need any filtering (a real ipython will be executing them).
496 496 terms = list(self._EXAMPLE_RE_IP.finditer(string))
497 497 if self._EXTERNAL_IP.search(string):
498 498 #print '-'*70 # dbg
499 499 #print 'IPExternalExample, Source:\n',string # dbg
500 500 #print '-'*70 # dbg
501 501 Example = IPExternalExample
502 502 else:
503 503 #print '-'*70 # dbg
504 504 #print 'IPExample, Source:\n',string # dbg
505 505 #print '-'*70 # dbg
506 506 Example = IPExample
507 507 ip2py = True
508 508
509 509 for m in terms:
510 510 # Add the pre-example text to `output`.
511 511 output.append(string[charno:m.start()])
512 512 # Update lineno (lines before this example)
513 513 lineno += string.count('\n', charno, m.start())
514 514 # Extract info from the regexp match.
515 515 (source, options, want, exc_msg) = \
516 516 self._parse_example(m, name, lineno,ip2py)
517 517
518 518 # Append the random-output marker (it defaults to empty in most
519 519 # cases, it's only non-empty for 'all-random' tests):
520 520 want += random_marker
521 521
522 522 if Example is IPExternalExample:
523 523 options[doctest.NORMALIZE_WHITESPACE] = True
524 524 want += '\n'
525 525
526 526 # Create an Example, and add it to the list.
527 527 if not self._IS_BLANK_OR_COMMENT(source):
528 528 output.append(Example(source, want, exc_msg,
529 529 lineno=lineno,
530 530 indent=min_indent+len(m.group('indent')),
531 531 options=options))
532 532 # Update lineno (lines inside this example)
533 533 lineno += string.count('\n', m.start(), m.end())
534 534 # Update charno.
535 535 charno = m.end()
536 536 # Add any remaining post-example text to `output`.
537 537 output.append(string[charno:])
538 538 return output
539 539
540 540 def _parse_example(self, m, name, lineno,ip2py=False):
541 541 """
542 542 Given a regular expression match from `_EXAMPLE_RE` (`m`),
543 543 return a pair `(source, want)`, where `source` is the matched
544 544 example's source code (with prompts and indentation stripped);
545 545 and `want` is the example's expected output (with indentation
546 546 stripped).
547 547
548 548 `name` is the string's name, and `lineno` is the line number
549 549 where the example starts; both are used for error messages.
550 550
551 551 Optional:
552 552 `ip2py`: if true, filter the input via IPython to convert the syntax
553 553 into valid python.
554 554 """
555 555
556 556 # Get the example's indentation level.
557 557 indent = len(m.group('indent'))
558 558
559 559 # Divide source into lines; check that they're properly
560 560 # indented; and then strip their indentation & prompts.
561 561 source_lines = m.group('source').split('\n')
562 562
563 563 # We're using variable-length input prompts
564 564 ps1 = m.group('ps1')
565 565 ps2 = m.group('ps2')
566 566 ps1_len = len(ps1)
567 567
568 568 self._check_prompt_blank(source_lines, indent, name, lineno,ps1_len)
569 569 if ps2:
570 570 self._check_prefix(source_lines[1:], ' '*indent + ps2, name, lineno)
571 571
572 572 source = '\n'.join([sl[indent+ps1_len+1:] for sl in source_lines])
573 573
574 574 if ip2py:
575 575 # Convert source input from IPython into valid Python syntax
576 576 source = self.ip2py(source)
577 577
578 578 # Divide want into lines; check that it's properly indented; and
579 579 # then strip the indentation. Spaces before the last newline should
580 580 # be preserved, so plain rstrip() isn't good enough.
581 581 want = m.group('want')
582 582 want_lines = want.split('\n')
583 583 if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
584 584 del want_lines[-1] # forget final newline & spaces after it
585 585 self._check_prefix(want_lines, ' '*indent, name,
586 586 lineno + len(source_lines))
587 587
588 588 # Remove ipython output prompt that might be present in the first line
589 589 want_lines[0] = re.sub(r'Out\[\d+\]: \s*?\n?','',want_lines[0])
590 590
591 591 want = '\n'.join([wl[indent:] for wl in want_lines])
592 592
593 593 # If `want` contains a traceback message, then extract it.
594 594 m = self._EXCEPTION_RE.match(want)
595 595 if m:
596 596 exc_msg = m.group('msg')
597 597 else:
598 598 exc_msg = None
599 599
600 600 # Extract options from the source.
601 601 options = self._find_options(source, name, lineno)
602 602
603 603 return source, options, want, exc_msg
604 604
605 605 def _check_prompt_blank(self, lines, indent, name, lineno, ps1_len):
606 606 """
607 607 Given the lines of a source string (including prompts and
608 608 leading indentation), check to make sure that every prompt is
609 609 followed by a space character. If any line is not followed by
610 610 a space character, then raise ValueError.
611 611
612 612 Note: IPython-modified version which takes the input prompt length as a
613 613 parameter, so that prompts of variable length can be dealt with.
614 614 """
615 615 space_idx = indent+ps1_len
616 616 min_len = space_idx+1
617 617 for i, line in enumerate(lines):
618 618 if len(line) >= min_len and line[space_idx] != ' ':
619 619 raise ValueError('line %r of the docstring for %s '
620 620 'lacks blank after %s: %r' %
621 621 (lineno+i+1, name,
622 622 line[indent:space_idx], line))
623 623
624 624
625 625 SKIP = doctest.register_optionflag('SKIP')
626 626
627 627
628 628 class IPDocTestRunner(doctest.DocTestRunner,object):
629 629 """Test runner that synchronizes the IPython namespace with test globals.
630 630 """
631 631
632 632 def run(self, test, compileflags=None, out=None, clear_globs=True):
633 633
634 634 # Hack: ipython needs access to the execution context of the example,
635 635 # so that it can propagate user variables loaded by %run into
636 636 # test.globs. We put them here into our modified %run as a function
637 637 # attribute. Our new %run will then only make the namespace update
638 638 # when called (rather than unconditionally updating test.globs here
639 639 # for all examples, most of which won't be calling %run anyway).
640 640 _run_ns_sync.test_globs = test.globs
641 641
642 642 return super(IPDocTestRunner,self).run(test,
643 643 compileflags,out,clear_globs)
644 644
645 645
646 646 class DocFileCase(doctest.DocFileCase):
647 647 """Overrides to provide filename
648 648 """
649 649 def address(self):
650 650 return (self._dt_test.filename, None, None)
651 651
652 652
653 653 class ExtensionDoctest(doctests.Doctest):
654 654 """Nose Plugin that supports doctests in extension modules.
655 655 """
656 656 name = 'extdoctest' # call nosetests with --with-extdoctest
657 657 enabled = True
658 658
659 659 def options(self, parser, env=os.environ):
660 660 Plugin.options(self, parser, env)
661 parser.add_option('--doctest-tests', action='store_true',
662 dest='doctest_tests',
663 default=env.get('NOSE_DOCTEST_TESTS',True),
664 help="Also look for doctests in test modules. "
665 "Note that classes, methods and functions should "
666 "have either doctests or non-doctest tests, "
667 "not both. [NOSE_DOCTEST_TESTS]")
668 parser.add_option('--doctest-extension', action="append",
669 dest="doctestExtension",
670 help="Also look for doctests in files with "
671 "this extension [NOSE_DOCTEST_EXTENSION]")
672 # Set the default as a list, if given in env; otherwise
673 # an additional value set on the command line will cause
674 # an error.
675 env_setting = env.get('NOSE_DOCTEST_EXTENSION')
676 if env_setting is not None:
677 parser.set_defaults(doctestExtension=tolist(env_setting))
678
661 679
662 680 def configure(self, options, config):
663 681 Plugin.configure(self, options, config)
664 682 self.doctest_tests = options.doctest_tests
665 683 self.extension = tolist(options.doctestExtension)
666 684
667 685 self.parser = doctest.DocTestParser()
668 686 self.finder = DocTestFinder()
669 687 self.checker = IPDoctestOutputChecker()
670 688 self.globs = None
671 689 self.extraglobs = None
672 690
673 691 def loadTestsFromExtensionModule(self,filename):
674 692 bpath,mod = os.path.split(filename)
675 693 modname = os.path.splitext(mod)[0]
676 694 try:
677 695 sys.path.append(bpath)
678 696 module = __import__(modname)
679 697 tests = list(self.loadTestsFromModule(module))
680 698 finally:
681 699 sys.path.pop()
682 700 return tests
683 701
684 702 # NOTE: the method below is almost a copy of the original one in nose, with
685 703 # a few modifications to control output checking.
686 704
687 705 def loadTestsFromModule(self, module):
688 706 #print 'lTM',module # dbg
689 707
690 708 if not self.matches(module.__name__):
691 709 log.debug("Doctest doesn't want module %s", module)
692 710 return
693 711
694 712 tests = self.finder.find(module,globs=self.globs,
695 713 extraglobs=self.extraglobs)
696 714 if not tests:
697 715 return
698 716
699 717 # always use whitespace and ellipsis options
700 718 optionflags = doctest.NORMALIZE_WHITESPACE | doctest.ELLIPSIS
701 719
702 720 tests.sort()
703 721 module_file = module.__file__
704 722 if module_file[-4:] in ('.pyc', '.pyo'):
705 723 module_file = module_file[:-1]
706 724 for test in tests:
707 725 if not test.examples:
708 726 continue
709 727 if not test.filename:
710 728 test.filename = module_file
711 729
712 730 yield DocTestCase(test,
713 731 optionflags=optionflags,
714 732 checker=self.checker)
715 733
716 734
717 735 def loadTestsFromFile(self, filename):
718 736 #print 'lTF',filename # dbg
719 737
720 738 if is_extension_module(filename):
721 739 for t in self.loadTestsFromExtensionModule(filename):
722 740 yield t
723 741 else:
724 742 if self.extension and anyp(filename.endswith, self.extension):
725 743 name = os.path.basename(filename)
726 744 dh = open(filename)
727 745 try:
728 746 doc = dh.read()
729 747 finally:
730 748 dh.close()
731 749 test = self.parser.get_doctest(
732 750 doc, globs={'__file__': filename}, name=name,
733 751 filename=filename, lineno=0)
734 752 if test.examples:
735 753 #print 'FileCase:',test.examples # dbg
736 754 yield DocFileCase(test)
737 755 else:
738 756 yield False # no tests to load
739 757
740 758 def wantFile(self,filename):
741 759 """Return whether the given filename should be scanned for tests.
742 760
743 761 Modified version that accepts extension modules as valid containers for
744 762 doctests.
745 763 """
746 #print 'Filename:',filename # dbg
764 print 'Filename:',filename # dbg
747 765
748 766 # XXX - temporarily hardcoded list, will move to driver later
749 767 exclude = ['IPython/external/',
750 'IPython/Extensions/ipy_',
751 768 'IPython/platutils_win32',
752 769 'IPython/frontend/cocoa',
753 770 'IPython_doctest_plugin',
754 771 'IPython/Gnuplot',
755 'IPython/Extensions/PhysicalQIn']
772 'IPython/Extensions/ipy_',
773 'IPython/Extensions/PhysicalQIn',
774 'IPython/Extensions/scitedirector',
775 'IPython/testing/plugin',
776 ]
756 777
757 778 for fex in exclude:
758 779 if fex in filename: # substring
759 780 #print '###>>> SKIP:',filename # dbg
760 781 return False
761 782
762 783 if is_extension_module(filename):
763 784 return True
764 785 else:
765 786 return doctests.Doctest.wantFile(self,filename)
766 787
767 788
768 789 class IPythonDoctest(ExtensionDoctest):
769 790 """Nose Plugin that supports doctests in extension modules.
770 791 """
771 792 name = 'ipdoctest' # call nosetests with --with-ipdoctest
772 793 enabled = True
773 794
774 795 def configure(self, options, config):
775 796
776 797 Plugin.configure(self, options, config)
777 798 self.doctest_tests = options.doctest_tests
778 799 self.extension = tolist(options.doctestExtension)
779 800
780 801 self.parser = IPDocTestParser()
781 802 self.finder = DocTestFinder(parser=self.parser)
782 803 self.checker = IPDoctestOutputChecker()
783 804 self.globs = None
784 805 self.extraglobs = None
806
@@ -1,180 +1,51
1 """Some simple tests for the plugin while running scripts.
2 """
1 3 # Module imports
2 4 # Std lib
3 5 import inspect
4 6
5 # Third party
6
7 7 # Our own
8 8 from IPython.testing import decorators as dec
9 9
10 10 #-----------------------------------------------------------------------------
11 # Utilities
12
13 # Note: copied from OInspect, kept here so the testing stuff doesn't create
14 # circular dependencies and is easier to reuse.
15 def getargspec(obj):
16 """Get the names and default values of a function's arguments.
17
18 A tuple of four things is returned: (args, varargs, varkw, defaults).
19 'args' is a list of the argument names (it may contain nested lists).
20 'varargs' and 'varkw' are the names of the * and ** arguments or None.
21 'defaults' is an n-tuple of the default values of the last n arguments.
22
23 Modified version of inspect.getargspec from the Python Standard
24 Library."""
25
26 if inspect.isfunction(obj):
27 func_obj = obj
28 elif inspect.ismethod(obj):
29 func_obj = obj.im_func
30 else:
31 raise TypeError, 'arg is not a Python function'
32 args, varargs, varkw = inspect.getargs(func_obj.func_code)
33 return args, varargs, varkw, func_obj.func_defaults
34
35 #-----------------------------------------------------------------------------
36 11 # Testing functions
37 12
38 13 def test_trivial():
39 14 """A trivial passing test."""
40 15 pass
41 16
42
43 @dec.skip
44 def test_deliberately_broken():
45 """A deliberately broken test - we want to skip this one."""
46 1/0
47
48
49 # Verify that we can correctly skip the doctest for a function at will, but
50 # that the docstring itself is NOT destroyed by the decorator.
51 @dec.skip_doctest
52 def doctest_bad(x,y=1,**k):
53 """A function whose doctest we need to skip.
54
55 >>> 1+1
56 3
57 """
58 print 'x:',x
59 print 'y:',y
60 print 'k:',k
61
62
63 def call_doctest_bad():
64 """Check that we can still call the decorated functions.
65
66 >>> doctest_bad(3,y=4)
67 x: 3
68 y: 4
69 k: {}
70 """
71 pass
72
73
74 # Doctest skipping should work for class methods too
75 class foo(object):
76 """Foo
77
78 Example:
79
80 >>> 1+1
81 2
82 """
83
84 @dec.skip_doctest
85 def __init__(self,x):
86 """Make a foo.
87
88 Example:
89
90 >>> f = foo(3)
91 junk
92 """
93 print 'Making a foo.'
94 self.x = x
95
96 @dec.skip_doctest
97 def bar(self,y):
98 """Example:
99
100 >>> f = foo(3)
101 >>> f.bar(0)
102 boom!
103 >>> 1/0
104 bam!
105 """
106 return 1/y
107
108 def baz(self,y):
109 """Example:
110
111 >>> f = foo(3)
112 Making a foo.
113 >>> f.baz(3)
114 True
115 """
116 return self.x==y
117
118
119 def test_skip_dt_decorator():
120 """Doctest-skipping decorator should preserve the docstring.
121 """
122 # Careful: 'check' must be a *verbatim* copy of the doctest_bad docstring!
123 check = """A function whose doctest we need to skip.
124
125 >>> 1+1
126 3
127 """
128 # Fetch the docstring from doctest_bad after decoration.
129 val = doctest_bad.__doc__
130
131 assert check==val,"doctest_bad docstrings don't match"
132
133
134 def test_skip_dt_decorator2():
135 """Doctest-skipping decorator should preserve function signature.
136 """
137 # Hardcoded correct answer
138 dtargs = (['x', 'y'], None, 'k', (1,))
139 # Introspect out the value
140 dtargsr = getargspec(doctest_bad)
141 assert dtargsr==dtargs, \
142 "Incorrectly reconstructed args for doctest_bad: %s" % (dtargsr,)
143
144
145 17 def doctest_run():
146 18 """Test running a trivial script.
147 19
148 20 In [13]: run simplevars.py
149 21 x is: 1
150 22 """
151 23
152 #@dec.skip_doctest
153 24 def doctest_runvars():
154 25 """Test that variables defined in scripts get loaded correcly via %run.
155 26
156 27 In [13]: run simplevars.py
157 28 x is: 1
158 29
159 30 In [14]: x
160 31 Out[14]: 1
161 32 """
162 33
163 34 def doctest_ivars():
164 35 """Test that variables defined interactively are picked up.
165 36 In [5]: zz=1
166 37
167 38 In [6]: zz
168 39 Out[6]: 1
169 40 """
170 41
171 42 @dec.skip_doctest
172 43 def doctest_refs():
173 44 """DocTest reference holding issues when running scripts.
174 45
175 46 In [32]: run show_refs.py
176 47 c referrers: [<type 'dict'>]
177 48
178 49 In [33]: map(type,gc.get_referrers(c))
179 50 Out[33]: [<type 'dict'>]
180 51 """
@@ -1,86 +1,87
1 1 """DEPRECATED - use IPython.testing.util instead.
2 2
3 3 Utilities for testing code.
4 4 """
5 5
6 6 #############################################################################
7 7
8 8 # This was old testing code we never really used in IPython. The pieces of
9 9 # testing machinery from snakeoil that were good have already been merged into
10 10 # the nose plugin, so this can be taken away soon. Leave a warning for now,
11 11 # we'll remove it in a later release (around 0.10 or so).
12
12 13 from warnings import warn
13 14 warn('This will be removed soon. Use IPython.testing.util instead',
14 15 DeprecationWarning)
15 16
16 17 #############################################################################
17 18
18 19 # Required modules and packages
19 20
20 21 # Standard Python lib
21 22 import os
22 23 import sys
23 24
24 25 # From this project
25 26 from IPython.tools import utils
26 27
27 28 # path to our own installation, so we can find source files under this.
28 29 TEST_PATH = os.path.dirname(os.path.abspath(__file__))
29 30
30 31 # Global flag, used by vprint
31 32 VERBOSE = '-v' in sys.argv or '--verbose' in sys.argv
32 33
33 34 ##########################################################################
34 35 # Code begins
35 36
36 37 # Some utility functions
37 38 def vprint(*args):
38 39 """Print-like function which relies on a global VERBOSE flag."""
39 40 if not VERBOSE:
40 41 return
41 42
42 43 write = sys.stdout.write
43 44 for item in args:
44 45 write(str(item))
45 46 write('\n')
46 47 sys.stdout.flush()
47 48
48 49 def test_path(path):
49 50 """Return a path as a subdir of the test package.
50 51
51 52 This finds the correct path of the test package on disk, and prepends it
52 53 to the input path."""
53 54
54 55 return os.path.join(TEST_PATH,path)
55 56
56 57 def fullPath(startPath,files):
57 58 """Make full paths for all the listed files, based on startPath.
58 59
59 60 Only the base part of startPath is kept, since this routine is typically
60 61 used with a script's __file__ variable as startPath. The base of startPath
61 62 is then prepended to all the listed files, forming the output list.
62 63
63 64 :Parameters:
64 65 startPath : string
65 66 Initial path to use as the base for the results. This path is split
66 67 using os.path.split() and only its first component is kept.
67 68
68 69 files : string or list
69 70 One or more files.
70 71
71 72 :Examples:
72 73
73 74 >>> fullPath('/foo/bar.py',['a.txt','b.txt'])
74 75 ['/foo/a.txt', '/foo/b.txt']
75 76
76 77 >>> fullPath('/foo',['a.txt','b.txt'])
77 78 ['/a.txt', '/b.txt']
78 79
79 80 If a single file is given, the output is still a list:
80 81 >>> fullPath('/foo','a.txt')
81 82 ['/a.txt']
82 83 """
83 84
84 85 files = utils.list_strings(files)
85 86 base = os.path.split(startPath)[0]
86 87 return [ os.path.join(base,f) for f in files ]
@@ -1,1059 +1,1062
1 1 # -*- coding: utf-8 -*-
2 2 """
3 3 ultraTB.py -- Spice up your tracebacks!
4 4
5 5 * ColorTB
6 6 I've always found it a bit hard to visually parse tracebacks in Python. The
7 7 ColorTB class is a solution to that problem. It colors the different parts of a
8 8 traceback in a manner similar to what you would expect from a syntax-highlighting
9 9 text editor.
10 10
11 11 Installation instructions for ColorTB:
12 12 import sys,ultraTB
13 13 sys.excepthook = ultraTB.ColorTB()
14 14
15 15 * VerboseTB
16 16 I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds
17 17 of useful info when a traceback occurs. Ping originally had it spit out HTML
18 18 and intended it for CGI programmers, but why should they have all the fun? I
19 19 altered it to spit out colored text to the terminal. It's a bit overwhelming,
20 20 but kind of neat, and maybe useful for long-running programs that you believe
21 21 are bug-free. If a crash *does* occur in that type of program you want details.
22 22 Give it a shot--you'll love it or you'll hate it.
23 23
24 24 Note:
25 25
26 26 The Verbose mode prints the variables currently visible where the exception
27 27 happened (shortening their strings if too long). This can potentially be
28 28 very slow, if you happen to have a huge data structure whose string
29 29 representation is complex to compute. Your computer may appear to freeze for
30 30 a while with cpu usage at 100%. If this occurs, you can cancel the traceback
31 31 with Ctrl-C (maybe hitting it more than once).
32 32
33 33 If you encounter this kind of situation often, you may want to use the
34 34 Verbose_novars mode instead of the regular Verbose, which avoids formatting
35 35 variables (but otherwise includes the information and context given by
36 36 Verbose).
37 37
38 38
39 39 Installation instructions for VerboseTB:
40 40 import sys,ultraTB
41 41 sys.excepthook = ultraTB.VerboseTB()
42 42
43 43 Note: Much of the code in this module was lifted verbatim from the standard
44 44 library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'.
45 45
46 46 * Color schemes
47 47 The colors are defined in the class TBTools through the use of the
48 48 ColorSchemeTable class. Currently the following exist:
49 49
50 50 - NoColor: allows all of this module to be used in any terminal (the color
51 51 escapes are just dummy blank strings).
52 52
53 53 - Linux: is meant to look good in a terminal like the Linux console (black
54 54 or very dark background).
55 55
56 56 - LightBG: similar to Linux but swaps dark/light colors to be more readable
57 57 in light background terminals.
58 58
59 59 You can implement other color schemes easily; the syntax is fairly
60 60 self-explanatory. Please send back new schemes you develop to the author for
61 61 possible inclusion in future releases.
62 62
63 63 $Id: ultraTB.py 2908 2007-12-30 21:07:46Z vivainio $"""
64 64
65 65 #*****************************************************************************
66 66 # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu>
67 67 # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu>
68 68 #
69 69 # Distributed under the terms of the BSD License. The full license is in
70 70 # the file COPYING, distributed as part of this software.
71 71 #*****************************************************************************
72 72
73 73 from IPython import Release
74 74 __author__ = '%s <%s>\n%s <%s>' % (Release.authors['Nathan']+
75 75 Release.authors['Fernando'])
76 76 __license__ = Release.license
77 77
78 78 # Required modules
79 79 import inspect
80 80 import keyword
81 81 import linecache
82 82 import os
83 83 import pydoc
84 84 import re
85 85 import string
86 86 import sys
87 87 import time
88 88 import tokenize
89 89 import traceback
90 90 import types
91 91
92 92 # For purposes of monkeypatching inspect to fix a bug in it.
93 93 from inspect import getsourcefile, getfile, getmodule,\
94 94 ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode
95 95
96 96
97 97 # IPython's own modules
98 98 # Modified pdb which doesn't damage IPython's readline handling
99 99 from IPython import Debugger, PyColorize
100 100 from IPython.ipstruct import Struct
101 101 from IPython.excolors import ExceptionColors
102 102 from IPython.genutils import Term,uniq_stable,error,info
103 103
104 104 # Globals
105 105 # amount of space to put line numbers before verbose tracebacks
106 106 INDENT_SIZE = 8
107 107
108 108 # Default color scheme. This is used, for example, by the traceback
109 109 # formatter. When running in an actual IPython instance, the user's rc.colors
110 110 # value is used, but having a module global makes this functionality available
111 111 # to users of ultraTB who are NOT running inside ipython.
112 112 DEFAULT_SCHEME = 'NoColor'
113 113
114 114 #---------------------------------------------------------------------------
115 115 # Code begins
116 116
117 117 # Utility functions
118 118 def inspect_error():
119 119 """Print a message about internal inspect errors.
120 120
121 121 These are unfortunately quite common."""
122 122
123 123 error('Internal Python error in the inspect module.\n'
124 124 'Below is the traceback from this internal error.\n')
125 125
126 126
127 127 def findsource(object):
128 128 """Return the entire source file and starting line number for an object.
129 129
130 130 The argument may be a module, class, method, function, traceback, frame,
131 131 or code object. The source code is returned as a list of all the lines
132 132 in the file and the line number indexes a line in that list. An IOError
133 133 is raised if the source code cannot be retrieved.
134 134
135 135 FIXED version with which we monkeypatch the stdlib to work around a bug."""
136 136
137 137 file = getsourcefile(object) or getfile(object)
138 138 # If the object is a frame, then trying to get the globals dict from its
139 139 # module won't work. Instead, the frame object itself has the globals
140 140 # dictionary.
141 141 globals_dict = None
142 142 if inspect.isframe(object):
143 143 # XXX: can this ever be false?
144 144 globals_dict = object.f_globals
145 145 else:
146 146 module = getmodule(object, file)
147 147 if module:
148 148 globals_dict = module.__dict__
149 149 lines = linecache.getlines(file, globals_dict)
150 150 if not lines:
151 151 raise IOError('could not get source code')
152 152
153 153 if ismodule(object):
154 154 return lines, 0
155 155
156 156 if isclass(object):
157 157 name = object.__name__
158 158 pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
159 159 # make some effort to find the best matching class definition:
160 160 # use the one with the least indentation, which is the one
161 161 # that's most probably not inside a function definition.
162 162 candidates = []
163 163 for i in range(len(lines)):
164 164 match = pat.match(lines[i])
165 165 if match:
166 166 # if it's at toplevel, it's already the best one
167 167 if lines[i][0] == 'c':
168 168 return lines, i
169 169 # else add whitespace to candidate list
170 170 candidates.append((match.group(1), i))
171 171 if candidates:
172 172 # this will sort by whitespace, and by line number,
173 173 # less whitespace first
174 174 candidates.sort()
175 175 return lines, candidates[0][1]
176 176 else:
177 177 raise IOError('could not find class definition')
178 178
179 179 if ismethod(object):
180 180 object = object.im_func
181 181 if isfunction(object):
182 182 object = object.func_code
183 183 if istraceback(object):
184 184 object = object.tb_frame
185 185 if isframe(object):
186 186 object = object.f_code
187 187 if iscode(object):
188 188 if not hasattr(object, 'co_firstlineno'):
189 189 raise IOError('could not find function definition')
190 190 pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
191 191 pmatch = pat.match
192 192 # fperez - fix: sometimes, co_firstlineno can give a number larger than
193 193 # the length of lines, which causes an error. Safeguard against that.
194 194 lnum = min(object.co_firstlineno,len(lines))-1
195 195 while lnum > 0:
196 196 if pmatch(lines[lnum]): break
197 197 lnum -= 1
198 198
199 199 return lines, lnum
200 200 raise IOError('could not find code object')
201 201
202 202 # Monkeypatch inspect to apply our bugfix. This code only works with py25
203 203 if sys.version_info[:2] >= (2,5):
204 204 inspect.findsource = findsource
205 205
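A sketch of the calling convention of findsource() as patched above (the sample function is invented, and the code must live in a real source file for linecache to see it): the return value is the whole file as a list of lines plus a 0-based index of the definition line.

    import inspect

    def sample():
        return 42

    lines, lnum = inspect.findsource(sample)   # the patched version on Python >= 2.5
    assert lines[lnum].lstrip().startswith('def sample')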
206 206 def fix_frame_records_filenames(records):
207 207 """Try to fix the filenames in each record from inspect.getinnerframes().
208 208
209 209 Particularly, modules loaded from within zip files have useless filenames
210 210 attached to their code object, and inspect.getinnerframes() just uses it.
211 211 """
212 212 fixed_records = []
213 213 for frame, filename, line_no, func_name, lines, index in records:
214 214 # Look inside the frame's globals dictionary for __file__, which should
215 215 # be better.
216 216 better_fn = frame.f_globals.get('__file__', None)
217 217 if isinstance(better_fn, str):
218 218 # Check the type just in case someone did something weird with
219 219 # __file__. It might also be None if the error occurred during
220 220 # import.
221 221 filename = better_fn
222 222 fixed_records.append((frame, filename, line_no, func_name, lines, index))
223 223 return fixed_records
224 224
225 225
226 226 def _fixed_getinnerframes(etb, context=1,tb_offset=0):
227 227 import linecache
228 228 LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5
229 229
230 230 records = fix_frame_records_filenames(inspect.getinnerframes(etb, context))
231 231
232 232 # If the error is at the console, don't build any context, since it would
233 233 # otherwise produce 5 blank lines printed out (there is no file at the
234 234 # console)
235 235 rec_check = records[tb_offset:]
236 236 try:
237 237 rname = rec_check[0][1]
238 238 if rname == '<ipython console>' or rname.endswith('<string>'):
239 239 return rec_check
240 240 except IndexError:
241 241 pass
242 242
243 243 aux = traceback.extract_tb(etb)
244 244 assert len(records) == len(aux)
245 245 for i, (file, lnum, _, _) in zip(range(len(records)), aux):
246 246 maybeStart = lnum-1 - context//2
247 247 start = max(maybeStart, 0)
248 248 end = start + context
249 249 lines = linecache.getlines(file)[start:end]
250 250 # pad with empty lines if necessary
251 251 if maybeStart < 0:
252 252 lines = (['\n'] * -maybeStart) + lines
253 253 if len(lines) < context:
254 254 lines += ['\n'] * (context - len(lines))
255 255 buf = list(records[i])
256 256 buf[LNUM_POS] = lnum
257 257 buf[INDEX_POS] = lnum - 1 - start
258 258 buf[LINES_POS] = lines
259 259 records[i] = tuple(buf)
260 260 return records[tb_offset:]
261 261
262 262 # Helper function -- largely belongs to VerboseTB, but we need the same
263 263 # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they
264 264 # can be recognized properly by ipython.el's py-traceback-line-re
265 265 # (SyntaxErrors have to be treated specially because they have no traceback)
266 266
267 267 _parser = PyColorize.Parser()
268 268
269 269 def _formatTracebackLines(lnum, index, lines, Colors, lvals=None,scheme=None):
270 270 numbers_width = INDENT_SIZE - 1
271 271 res = []
272 272 i = lnum - index
273 273
274 274 # This lets us get fully syntax-highlighted tracebacks.
275 275 if scheme is None:
276 276 try:
277 277 scheme = __IPYTHON__.rc.colors
278 278 except:
279 279 scheme = DEFAULT_SCHEME
280 280 _line_format = _parser.format2
281 281
282 282 for line in lines:
283 283 new_line, err = _line_format(line,'str',scheme)
284 284 if not err: line = new_line
285 285
286 286 if i == lnum:
287 287 # This is the line with the error
288 288 pad = numbers_width - len(str(i))
289 289 if pad >= 3:
290 290 marker = '-'*(pad-3) + '-> '
291 291 elif pad == 2:
292 292 marker = '> '
293 293 elif pad == 1:
294 294 marker = '>'
295 295 else:
296 296 marker = ''
297 297 num = marker + str(i)
298 298 line = '%s%s%s %s%s' %(Colors.linenoEm, num,
299 299 Colors.line, line, Colors.Normal)
300 300 else:
301 301 num = '%*s' % (numbers_width,i)
302 302 line = '%s%s%s %s' %(Colors.lineno, num,
303 303 Colors.Normal, line)
304 304
305 305 res.append(line)
306 306 if lvals and i == lnum:
307 307 res.append(lvals + '\n')
308 308 i = i + 1
309 309 return res
310 310
311 311
312 312 #---------------------------------------------------------------------------
313 313 # Module classes
314 314 class TBTools:
315 315 """Basic tools used by all traceback printer classes."""
316 316
317 317 def __init__(self,color_scheme = 'NoColor',call_pdb=False):
318 318 # Whether to call the interactive pdb debugger after printing
319 319 # tracebacks or not
320 320 self.call_pdb = call_pdb
321 321
322 322 # Create color table
323 323 self.color_scheme_table = ExceptionColors
324 324
325 325 self.set_colors(color_scheme)
326 326 self.old_scheme = color_scheme # save initial value for toggles
327 327
328 328 if call_pdb:
329 329 self.pdb = Debugger.Pdb(self.color_scheme_table.active_scheme_name)
330 330 else:
331 331 self.pdb = None
332 332
333 333 def set_colors(self,*args,**kw):
334 334 """Shorthand access to the color table scheme selector method."""
335 335
336 336 # Set own color table
337 337 self.color_scheme_table.set_active_scheme(*args,**kw)
338 338 # for convenience, set Colors to the active scheme
339 339 self.Colors = self.color_scheme_table.active_colors
340 340 # Also set colors of debugger
341 341 if hasattr(self,'pdb') and self.pdb is not None:
342 342 self.pdb.set_colors(*args,**kw)
343 343
344 344 def color_toggle(self):
345 345 """Toggle between the currently active color scheme and NoColor."""
346 346
347 347 if self.color_scheme_table.active_scheme_name == 'NoColor':
348 348 self.color_scheme_table.set_active_scheme(self.old_scheme)
349 349 self.Colors = self.color_scheme_table.active_colors
350 350 else:
351 351 self.old_scheme = self.color_scheme_table.active_scheme_name
352 352 self.color_scheme_table.set_active_scheme('NoColor')
353 353 self.Colors = self.color_scheme_table.active_colors
354 354
355 355 #---------------------------------------------------------------------------
356 356 class ListTB(TBTools):
357 357 """Print traceback information from a traceback list, with optional color.
358 358
359 359 Calling: requires 3 arguments:
360 360 (etype, evalue, elist)
361 361 as would be obtained by:
362 362 etype, evalue, tb = sys.exc_info()
363 363 if tb:
364 364 elist = traceback.extract_tb(tb)
365 365 else:
366 366 elist = None
367 367
368 368 It can thus be used by programs which need to process the traceback before
369 369 printing (such as console replacements based on the code module from the
370 370 standard library).
371 371
372 372 Because they are meant to be called without a full traceback (only a
373 373 list), instances of this class can't call the interactive pdb debugger."""
374 374
375 375 def __init__(self,color_scheme = 'NoColor'):
376 376 TBTools.__init__(self,color_scheme = color_scheme,call_pdb=0)
377 377
378 378 def __call__(self, etype, value, elist):
379 379 Term.cout.flush()
380 380 print >> Term.cerr, self.text(etype,value,elist)
381 381 Term.cerr.flush()
382 382
383 383 def text(self,etype, value, elist,context=5):
384 384 """Return a color formatted string with the traceback info."""
385 385
386 386 Colors = self.Colors
387 387 out_string = ['%s%s%s\n' % (Colors.topline,'-'*60,Colors.Normal)]
388 388 if elist:
389 389 out_string.append('Traceback %s(most recent call last)%s:' % \
390 390 (Colors.normalEm, Colors.Normal) + '\n')
391 391 out_string.extend(self._format_list(elist))
392 392 lines = self._format_exception_only(etype, value)
393 393 for line in lines[:-1]:
394 394 out_string.append(" "+line)
395 395 out_string.append(lines[-1])
396 396 return ''.join(out_string)
397 397
398 398 def _format_list(self, extracted_list):
399 399 """Format a list of traceback entry tuples for printing.
400 400
401 401 Given a list of tuples as returned by extract_tb() or
402 402 extract_stack(), return a list of strings ready for printing.
403 403 Each string in the resulting list corresponds to the item with the
404 404 same index in the argument list. Each string ends in a newline;
405 405 the strings may contain internal newlines as well, for those items
406 406 whose source text line is not None.
407 407
408 408 Lifted almost verbatim from traceback.py
409 409 """
410 410
411 411 Colors = self.Colors
412 412 list = []
413 413 for filename, lineno, name, line in extracted_list[:-1]:
414 414 item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \
415 415 (Colors.filename, filename, Colors.Normal,
416 416 Colors.lineno, lineno, Colors.Normal,
417 417 Colors.name, name, Colors.Normal)
418 418 if line:
419 419 item = item + ' %s\n' % line.strip()
420 420 list.append(item)
421 421 # Emphasize the last entry
422 422 filename, lineno, name, line = extracted_list[-1]
423 423 item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \
424 424 (Colors.normalEm,
425 425 Colors.filenameEm, filename, Colors.normalEm,
426 426 Colors.linenoEm, lineno, Colors.normalEm,
427 427 Colors.nameEm, name, Colors.normalEm,
428 428 Colors.Normal)
429 429 if line:
430 430 item = item + '%s %s%s\n' % (Colors.line, line.strip(),
431 431 Colors.Normal)
432 432 list.append(item)
433 433 return list
434 434
435 435 def _format_exception_only(self, etype, value):
436 436 """Format the exception part of a traceback.
437 437
438 438 The arguments are the exception type and value such as given by
439 439 sys.exc_info()[:2]. The return value is a list of strings, each ending
440 440 in a newline. Normally, the list contains a single string; however,
441 441 for SyntaxError exceptions, it contains several lines that (when
442 442 printed) display detailed information about where the syntax error
443 443 occurred. The message indicating which exception occurred is
444 444 always the last string in the list.
445 445
446 446 Also lifted nearly verbatim from traceback.py
447 447 """
448
448
449 have_filedata = False
449 450 Colors = self.Colors
450 451 list = []
451 452 try:
452 453 stype = Colors.excName + etype.__name__ + Colors.Normal
453 454 except AttributeError:
454 455 stype = etype # String exceptions don't get special coloring
455 456 if value is None:
456 457 list.append( str(stype) + '\n')
457 458 else:
458 459 if etype is SyntaxError:
459 460 try:
460 461 msg, (filename, lineno, offset, line) = value
461 462 except:
462 pass
463 have_filedata = False
463 464 else:
465 have_filedata = True
464 466 #print 'filename is',filename # dbg
465 467 if not filename: filename = "<string>"
466 468 list.append('%s File %s"%s"%s, line %s%d%s\n' % \
467 469 (Colors.normalEm,
468 470 Colors.filenameEm, filename, Colors.normalEm,
469 471 Colors.linenoEm, lineno, Colors.Normal ))
470 472 if line is not None:
471 473 i = 0
472 474 while i < len(line) and line[i].isspace():
473 475 i = i+1
474 476 list.append('%s %s%s\n' % (Colors.line,
475 477 line.strip(),
476 478 Colors.Normal))
477 479 if offset is not None:
478 480 s = ' '
479 481 for c in line[i:offset-1]:
480 482 if c.isspace():
481 483 s = s + c
482 484 else:
483 485 s = s + ' '
484 486 list.append('%s%s^%s\n' % (Colors.caret, s,
485 487 Colors.Normal) )
486 488 value = msg
487 489 s = self._some_str(value)
488 490 if s:
489 491 list.append('%s%s:%s %s\n' % (str(stype), Colors.excName,
490 492 Colors.Normal, s))
491 493 else:
492 494 list.append('%s\n' % str(stype))
493 495
494 496 # vds:>>
495 __IPYTHON__.hooks.synchronize_with_editor(filename, lineno, 0)
497 if have_filedata:
498 __IPYTHON__.hooks.synchronize_with_editor(filename, lineno, 0)
496 499 # vds:<<
497 500
498 501 return list
499 502
500 503 def _some_str(self, value):
501 504 # Lifted from traceback.py
502 505 try:
503 506 return str(value)
504 507 except:
505 508 return '<unprintable %s object>' % type(value).__name__
506 509
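# A minimal usage sketch for ListTB, assuming this module is importable as
# IPython.ultraTB (a standalone illustrative snippet, not part of the class
# above). It builds the (etype, evalue, elist) triple described in the
# ListTB docstring and hands it to an instance:

import sys, traceback
from IPython.ultraTB import ListTB

ltb = ListTB(color_scheme='NoColor')
try:
    1/0
except:
    # capture the exception and give ListTB the pre-extracted frame list
    etype, evalue, tb = sys.exc_info()
    ltb(etype, evalue, traceback.extract_tb(tb))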
507 510 #----------------------------------------------------------------------------
508 511 class VerboseTB(TBTools):
509 512 """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead
510 513 of HTML. Requires inspect and pydoc. Crazy, man.
511 514
512 515 Modified version which optionally strips the topmost entries from the
513 516 traceback, to be used with alternate interpreters (because their own code
514 517 would appear in the traceback)."""
515 518
516 519 def __init__(self,color_scheme = 'Linux',tb_offset=0,long_header=0,
517 520 call_pdb = 0, include_vars=1):
518 521 """Specify traceback offset, headers and color scheme.
519 522
520 523 Define how many frames to drop from the tracebacks. Calling it with
521 524 tb_offset=1 allows use of this handler in interpreters which will have
522 525 their own code at the top of the traceback (VerboseTB will first
523 526 remove that frame before printing the traceback info)."""
524 527 TBTools.__init__(self,color_scheme=color_scheme,call_pdb=call_pdb)
525 528 self.tb_offset = tb_offset
526 529 self.long_header = long_header
527 530 self.include_vars = include_vars
528 531
529 532 def text(self, etype, evalue, etb, context=5):
530 533 """Return a nice text document describing the traceback."""
531 534
532 535 # some locals
533 536 try:
534 537 etype = etype.__name__
535 538 except AttributeError:
536 539 pass
537 540 Colors = self.Colors # just a shorthand + quicker name lookup
538 541 ColorsNormal = Colors.Normal # used a lot
539 542 col_scheme = self.color_scheme_table.active_scheme_name
540 543 indent = ' '*INDENT_SIZE
541 544 em_normal = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal)
542 545 undefined = '%sundefined%s' % (Colors.em, ColorsNormal)
543 546 exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal)
544 547
545 548 # some internal-use functions
546 549 def text_repr(value):
547 550 """Hopefully pretty robust repr equivalent."""
548 551 # this is pretty horrible but should always return *something*
549 552 try:
550 553 return pydoc.text.repr(value)
551 554 except KeyboardInterrupt:
552 555 raise
553 556 except:
554 557 try:
555 558 return repr(value)
556 559 except KeyboardInterrupt:
557 560 raise
558 561 except:
559 562 try:
560 563 # all still in an except block so we catch
561 564 # getattr raising
562 565 name = getattr(value, '__name__', None)
563 566 if name:
564 567 # ick, recursion
565 568 return text_repr(name)
566 569 klass = getattr(value, '__class__', None)
567 570 if klass:
568 571 return '%s instance' % text_repr(klass)
569 572 except KeyboardInterrupt:
570 573 raise
571 574 except:
572 575 return 'UNRECOVERABLE REPR FAILURE'
573 576 def eqrepr(value, repr=text_repr): return '=%s' % repr(value)
574 577 def nullrepr(value, repr=text_repr): return ''
575 578
576 579 # meat of the code begins
577 580 try:
578 581 etype = etype.__name__
579 582 except AttributeError:
580 583 pass
581 584
582 585 if self.long_header:
583 586 # Header with the exception type, python version, and date
584 587 pyver = 'Python ' + string.split(sys.version)[0] + ': ' + sys.executable
585 588 date = time.ctime(time.time())
586 589
587 590 head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal,
588 591 exc, ' '*(75-len(str(etype))-len(pyver)),
589 592 pyver, string.rjust(date, 75) )
590 593 head += "\nA problem occurred executing Python code. Here is the sequence of function"\
591 594 "\ncalls leading up to the error, with the most recent (innermost) call last."
592 595 else:
593 596 # Simplified header
594 597 head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc,
595 598 string.rjust('Traceback (most recent call last)',
596 599 75 - len(str(etype)) ) )
597 600 frames = []
598 601 # Flush cache before calling inspect. This helps alleviate some of the
599 602 # problems with python 2.3's inspect.py.
600 603 linecache.checkcache()
601 604 # Drop topmost frames if requested
602 605 try:
603 606 # Try the default getinnerframes and Alex's: Alex's fixes some
604 607 # problems, but it generates empty tracebacks for console errors
605 608 # (5 blank lines) where none should be returned.
606 609 #records = inspect.getinnerframes(etb, context)[self.tb_offset:]
607 610 #print 'python records:', records # dbg
608 611 records = _fixed_getinnerframes(etb, context,self.tb_offset)
609 612 #print 'alex records:', records # dbg
610 613 except:
611 614
612 615 # FIXME: I've been getting many crash reports from python 2.3
613 616 # users, traceable to inspect.py. If I can find a small test-case
614 617 # to reproduce this, I should either write a better workaround or
615 618 # file a bug report against inspect (if that's the real problem).
616 619 # So far, I haven't been able to find an isolated example to
617 620 # reproduce the problem.
618 621 inspect_error()
619 622 traceback.print_exc(file=Term.cerr)
620 623 info('\nUnfortunately, your original traceback can not be constructed.\n')
621 624 return ''
622 625
623 626 # build some color string templates outside these nested loops
624 627 tpl_link = '%s%%s%s' % (Colors.filenameEm,ColorsNormal)
625 628 tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm,
626 629 ColorsNormal)
627 630 tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \
628 631 (Colors.vName, Colors.valEm, ColorsNormal)
629 632 tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal)
630 633 tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal,
631 634 Colors.vName, ColorsNormal)
632 635 tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal)
633 636 tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal)
634 637 tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line,
635 638 ColorsNormal)
636 639
637 640 # now, loop over all records printing context and info
638 641 abspath = os.path.abspath
639 642 for frame, file, lnum, func, lines, index in records:
640 643 #print '*** record:',file,lnum,func,lines,index # dbg
641 644 try:
642 645 file = file and abspath(file) or '?'
643 646 except OSError:
644 647 # if file is '<console>' or something not in the filesystem,
645 648 # the abspath call will throw an OSError. Just ignore it and
646 649 # keep the original file string.
647 650 pass
648 651 link = tpl_link % file
649 652 try:
650 653 args, varargs, varkw, locals = inspect.getargvalues(frame)
651 654 except:
652 655 # This can happen due to a bug in python2.3. We should be
653 656 # able to remove this try/except when 2.4 becomes a
654 657 # requirement. Bug details at http://python.org/sf/1005466
655 658 inspect_error()
656 659 traceback.print_exc(file=Term.cerr)
657 660 info("\nIPython's exception reporting continues...\n")
658 661
659 662 if func == '?':
660 663 call = ''
661 664 else:
662 665 # Decide whether to include variable details or not
663 666 var_repr = self.include_vars and eqrepr or nullrepr
664 667 try:
665 668 call = tpl_call % (func,inspect.formatargvalues(args,
666 669 varargs, varkw,
667 670 locals,formatvalue=var_repr))
668 671 except KeyError:
669 672 # Very odd crash from inspect.formatargvalues(). The
670 673 # scenario under which it appeared was a call to
671 674 # view(array,scale) in NumTut.view.view(), where scale had
672 675 # been defined as a scalar (it should be a tuple). Somehow
673 676 # inspect messes up resolving the argument list of view()
674 677 # and barfs out. At some point I should dig into this one
675 678 # and file a bug report about it.
676 679 inspect_error()
677 680 traceback.print_exc(file=Term.cerr)
678 681 info("\nIPython's exception reporting continues...\n")
679 682 call = tpl_call_fail % func
680 683
681 684 # Initialize a list of names on the current line, which the
682 685 # tokenizer below will populate.
683 686 names = []
684 687
685 688 def tokeneater(token_type, token, start, end, line):
686 689 """Stateful tokeneater which builds dotted names.
687 690
688 691 The list of names it appends to (from the enclosing scope) can
689 692 contain repeated composite names. This is unavoidable, since
690 693 there is no way to disambiguate partial dotted structures until
691 694 the full list is known. The caller is responsible for pruning
692 695 the final list of duplicates before using it."""
693 696
694 697 # build composite names
695 698 if token == '.':
696 699 try:
697 700 names[-1] += '.'
698 701 # store state so the next token is added for x.y.z names
699 702 tokeneater.name_cont = True
700 703 return
701 704 except IndexError:
702 705 pass
703 706 if token_type == tokenize.NAME and token not in keyword.kwlist:
704 707 if tokeneater.name_cont:
705 708 # Dotted names
706 709 names[-1] += token
707 710 tokeneater.name_cont = False
708 711 else:
709 712 # Regular new names. We append everything, the caller
710 713 # will be responsible for pruning the list later. It's
711 714 # very tricky to try to prune as we go, b/c composite
712 715 # names can fool us. The pruning at the end is easy
713 716 # to do (or the caller can print a list with repeated
714 717 # names if so desired).
715 718 names.append(token)
716 719 elif token_type == tokenize.NEWLINE:
717 720 raise IndexError
718 721 # we need to store a bit of state in the tokenizer to build
719 722 # dotted names
720 723 tokeneater.name_cont = False
721 724
722 725 def linereader(file=file, lnum=[lnum], getline=linecache.getline):
723 726 line = getline(file, lnum[0])
724 727 lnum[0] += 1
725 728 return line
726 729
727 730 # Build the list of names on this line of code where the exception
728 731 # occurred.
729 732 try:
730 733 # This builds the names list in-place by capturing it from the
731 734 # enclosing scope.
732 735 tokenize.tokenize(linereader, tokeneater)
733 736 except IndexError:
734 737 # signals exit of tokenizer
735 738 pass
736 739 except tokenize.TokenError,msg:
737 740 _m = ("An unexpected error occurred while tokenizing input\n"
738 741 "The following traceback may be corrupted or invalid\n"
739 742 "The error message is: %s\n" % msg)
740 743 error(_m)
741 744
742 745 # prune names list of duplicates, but keep the right order
743 746 unique_names = uniq_stable(names)
744 747
745 748 # Start loop over vars
746 749 lvals = []
747 750 if self.include_vars:
748 751 for name_full in unique_names:
749 752 name_base = name_full.split('.',1)[0]
750 753 if name_base in frame.f_code.co_varnames:
751 754 if locals.has_key(name_base):
752 755 try:
753 756 value = repr(eval(name_full,locals))
754 757 except:
755 758 value = undefined
756 759 else:
757 760 value = undefined
758 761 name = tpl_local_var % name_full
759 762 else:
760 763 if frame.f_globals.has_key(name_base):
761 764 try:
762 765 value = repr(eval(name_full,frame.f_globals))
763 766 except:
764 767 value = undefined
765 768 else:
766 769 value = undefined
767 770 name = tpl_global_var % name_full
768 771 lvals.append(tpl_name_val % (name,value))
769 772 if lvals:
770 773 lvals = '%s%s' % (indent,em_normal.join(lvals))
771 774 else:
772 775 lvals = ''
773 776
774 777 level = '%s %s\n' % (link,call)
775 778
776 779 if index is None:
777 780 frames.append(level)
778 781 else:
779 782 frames.append('%s%s' % (level,''.join(
780 783 _formatTracebackLines(lnum,index,lines,Colors,lvals,
781 784 col_scheme))))
782 785
783 786 # Get (safely) a string form of the exception info
784 787 try:
785 788 etype_str,evalue_str = map(str,(etype,evalue))
786 789 except:
787 790 # User exception is improperly defined.
788 791 etype,evalue = str,sys.exc_info()[:2]
789 792 etype_str,evalue_str = map(str,(etype,evalue))
790 793 # ... and format it
791 794 exception = ['%s%s%s: %s' % (Colors.excName, etype_str,
792 795 ColorsNormal, evalue_str)]
793 796 if type(evalue) is types.InstanceType:
794 797 try:
795 798 names = [w for w in dir(evalue) if isinstance(w, basestring)]
796 799 except:
797 800 # Every now and then, an object with funny internals blows up
798 801 # when dir() is called on it. We do the best we can to report
799 802 # the problem and continue
800 803 _m = '%sException reporting error (object with broken dir())%s:'
801 804 exception.append(_m % (Colors.excName,ColorsNormal))
802 805 etype_str,evalue_str = map(str,sys.exc_info()[:2])
803 806 exception.append('%s%s%s: %s' % (Colors.excName,etype_str,
804 807 ColorsNormal, evalue_str))
805 808 names = []
806 809 for name in names:
807 810 value = text_repr(getattr(evalue, name))
808 811 exception.append('\n%s%s = %s' % (indent, name, value))
809 812
810 813 # vds: >>
811 814 if records:
812 frame, file, lnum, func, lines, index = records[-1]
813 #print "file:", str(file), "linenb", str(lnum)
814 file = abspath(file)
815 __IPYTHON__.hooks.synchronize_with_editor(file, lnum, 0)
815 filepath, lnum = records[-1][1:3]
816 #print "file:", str(file), "linenb", str(lnum) # dbg
817 filepath = os.path.abspath(filepath)
818 __IPYTHON__.hooks.synchronize_with_editor(filepath, lnum, 0)
816 819 # vds: <<
817 820
818 821 # return all our info assembled as a single string
819 822 return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) )
820 823
821 824 def debugger(self,force=False):
822 825 """Call up the pdb debugger if desired, always clean up the tb
823 826 reference.
824 827
825 828 Keywords:
826 829
827 830 - force(False): by default, this routine checks the instance call_pdb
828 831 flag and does not actually invoke the debugger if the flag is false.
829 832 The 'force' option forces the debugger to activate even if the flag
830 833 is false.
831 834
832 835 If the call_pdb flag is set, the pdb interactive debugger is
833 836 invoked. In all cases, the self.tb reference to the current traceback
834 837 is deleted to prevent lingering references which hamper memory
835 838 management.
836 839
837 840 Note that each call to pdb() does an 'import readline', so if your app
838 841 requires a special setup for the readline completers, you'll have to
839 842 fix that by hand after invoking the exception handler."""
840 843
841 844 if force or self.call_pdb:
842 845 if self.pdb is None:
843 846 self.pdb = Debugger.Pdb(
844 847 self.color_scheme_table.active_scheme_name)
845 848 # the system displayhook may have changed, restore the original
846 849 # for pdb
847 850 dhook = sys.displayhook
848 851 sys.displayhook = sys.__displayhook__
849 852 self.pdb.reset()
850 853 # Find the right frame so we don't pop up inside ipython itself
851 854 if hasattr(self,'tb'):
852 855 etb = self.tb
853 856 else:
854 857 etb = self.tb = sys.last_traceback
855 858 while self.tb.tb_next is not None:
856 859 self.tb = self.tb.tb_next
857 860 try:
858 861 if etb and etb.tb_next:
859 862 etb = etb.tb_next
860 863 self.pdb.botframe = etb.tb_frame
861 864 self.pdb.interaction(self.tb.tb_frame, self.tb)
862 865 finally:
863 866 sys.displayhook = dhook
864 867
865 868 if hasattr(self,'tb'):
866 869 del self.tb
867 870
868 871 def handler(self, info=None):
869 872 (etype, evalue, etb) = info or sys.exc_info()
870 873 self.tb = etb
871 874 Term.cout.flush()
872 875 print >> Term.cerr, self.text(etype, evalue, etb)
873 876 Term.cerr.flush()
874 877
875 878 # Changed so an instance can just be called as VerboseTB_inst() and print
876 879 # out the right info on its own.
877 880 def __call__(self, etype=None, evalue=None, etb=None):
878 881 """This hook can replace sys.excepthook (for Python 2.1 or higher)."""
879 882 if etb is None:
880 883 self.handler()
881 884 else:
882 885 self.handler((etype, evalue, etb))
883 886 try:
884 887 self.debugger()
885 888 except KeyboardInterrupt:
886 889 print "\nKeyboardInterrupt"
887 890
888 891 #----------------------------------------------------------------------------
889 892 class FormattedTB(VerboseTB,ListTB):
890 893 """Subclass ListTB but allow calling with a traceback.
891 894
892 895 It can thus be used as a sys.excepthook for Python > 2.1.
893 896
894 897 Also adds 'Context' and 'Verbose' modes, not available in ListTB.
895 898
896 899 Allows a tb_offset to be specified. This is useful for situations where
897 900 one needs to remove a number of topmost frames from the traceback (such as
898 901 occurs with python programs that themselves execute other python code,
899 902 like Python shells). """
900 903
901 904 def __init__(self, mode = 'Plain', color_scheme='Linux',
902 905 tb_offset = 0,long_header=0,call_pdb=0,include_vars=0):
903 906
904 907 # NEVER change the order of this list. Put new modes at the end:
905 908 self.valid_modes = ['Plain','Context','Verbose']
906 909 self.verbose_modes = self.valid_modes[1:3]
907 910
908 911 VerboseTB.__init__(self,color_scheme,tb_offset,long_header,
909 912 call_pdb=call_pdb,include_vars=include_vars)
910 913 self.set_mode(mode)
911 914
912 915 def _extract_tb(self,tb):
913 916 if tb:
914 917 return traceback.extract_tb(tb)
915 918 else:
916 919 return None
917 920
918 921 def text(self, etype, value, tb,context=5,mode=None):
919 922 """Return formatted traceback.
920 923
921 924 If the optional mode parameter is given, it overrides the current
922 925 mode."""
923 926
924 927 if mode is None:
925 928 mode = self.mode
926 929 if mode in self.verbose_modes:
927 930 # verbose modes need a full traceback
928 931 return VerboseTB.text(self,etype, value, tb,context=5)
929 932 else:
930 933 # We must check the source cache because otherwise we can print
931 934 # out-of-date source code.
932 935 linecache.checkcache()
933 936 # Now we can extract and format the exception
934 937 elist = self._extract_tb(tb)
935 938 if len(elist) > self.tb_offset:
936 939 del elist[:self.tb_offset]
937 940 return ListTB.text(self,etype,value,elist)
938 941
939 942 def set_mode(self,mode=None):
940 943 """Switch to the desired mode.
941 944
942 945 If mode is not specified, cycles through the available modes."""
943 946
944 947 if not mode:
945 948 new_idx = ( self.valid_modes.index(self.mode) + 1 ) % \
946 949 len(self.valid_modes)
947 950 self.mode = self.valid_modes[new_idx]
948 951 elif mode not in self.valid_modes:
949 952 raise ValueError, 'Unrecognized mode in FormattedTB: <'+mode+'>\n'\
950 953 'Valid modes: '+str(self.valid_modes)
951 954 else:
952 955 self.mode = mode
953 956 # include variable details only in 'Verbose' mode
954 957 self.include_vars = (self.mode == self.valid_modes[2])
955 958
956 959 # some convenient shortcuts
957 960 def plain(self):
958 961 self.set_mode(self.valid_modes[0])
959 962
960 963 def context(self):
961 964 self.set_mode(self.valid_modes[1])
962 965
963 966 def verbose(self):
964 967 self.set_mode(self.valid_modes[2])
965 968
966 969 #----------------------------------------------------------------------------
967 970 class AutoFormattedTB(FormattedTB):
968 971 """A traceback printer which can be called on the fly.
969 972
970 973 It will find out about exceptions by itself.
971 974
972 975 A brief example:
973 976
974 977 AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux')
975 978 try:
976 979 ...
977 980 except:
978 981 AutoTB() # or AutoTB(out=logfile) where logfile is an open file object
979 982 """
980 983 def __call__(self,etype=None,evalue=None,etb=None,
981 984 out=None,tb_offset=None):
982 985 """Print out a formatted exception traceback.
983 986
984 987 Optional arguments:
985 988 - out: an open file-like object to direct output to.
986 989
987 990 - tb_offset: the number of frames to skip over in the stack, on a
988 991 per-call basis (this temporarily overrides the instance's tb_offset
989 992 given at initialization time). """
990 993
991 994 if out is None:
992 995 out = Term.cerr
993 996 Term.cout.flush()
994 997 if tb_offset is not None:
995 998 tb_offset, self.tb_offset = self.tb_offset, tb_offset
996 999 print >> out, self.text(etype, evalue, etb)
997 1000 self.tb_offset = tb_offset
998 1001 else:
999 1002 print >> out, self.text(etype, evalue, etb)
1000 1003 out.flush()
1001 1004 try:
1002 1005 self.debugger()
1003 1006 except KeyboardInterrupt:
1004 1007 print "\nKeyboardInterrupt"
1005 1008
1006 1009 def text(self,etype=None,value=None,tb=None,context=5,mode=None):
1007 1010 if etype is None:
1008 1011 etype,value,tb = sys.exc_info()
1009 1012 self.tb = tb
1010 1013 return FormattedTB.text(self,etype,value,tb,context=5,mode=mode)
1011 1014
1012 1015 #---------------------------------------------------------------------------
1013 1016 # A simple class to preserve Nathan's original functionality.
1014 1017 class ColorTB(FormattedTB):
1015 1018 """Shorthand to initialize a FormattedTB in Linux colors mode."""
1016 1019 def __init__(self,color_scheme='Linux',call_pdb=0):
1017 1020 FormattedTB.__init__(self,color_scheme=color_scheme,
1018 1021 call_pdb=call_pdb)
1019 1022
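# A short sketch of the sys.excepthook usage that the FormattedTB docstring
# mentions, again assuming the module is importable as IPython.ultraTB
# (the scheme and flags here are just illustrative choices):

import sys
from IPython.ultraTB import ColorTB, VerboseTB

sys.excepthook = ColorTB()          # compact, colorized tracebacks
# or, for full variable detail plus an automatic pdb session on errors:
# sys.excepthook = VerboseTB(color_scheme='Linux', call_pdb=1)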
1020 1023 #----------------------------------------------------------------------------
1021 1024 # module testing (minimal)
1022 1025 if __name__ == "__main__":
1023 1026 def spam(c, (d, e)):
1024 1027 x = c + d
1025 1028 y = c * d
1026 1029 foo(x, y)
1027 1030
1028 1031 def foo(a, b, bar=1):
1029 1032 eggs(a, b + bar)
1030 1033
1031 1034 def eggs(f, g, z=globals()):
1032 1035 h = f + g
1033 1036 i = f - g
1034 1037 return h / i
1035 1038
1036 1039 print ''
1037 1040 print '*** Before ***'
1038 1041 try:
1039 1042 print spam(1, (2, 3))
1040 1043 except:
1041 1044 traceback.print_exc()
1042 1045 print ''
1043 1046
1044 1047 handler = ColorTB()
1045 1048 print '*** ColorTB ***'
1046 1049 try:
1047 1050 print spam(1, (2, 3))
1048 1051 except:
1049 1052 apply(handler, sys.exc_info() )
1050 1053 print ''
1051 1054
1052 1055 handler = VerboseTB()
1053 1056 print '*** VerboseTB ***'
1054 1057 try:
1055 1058 print spam(1, (2, 3))
1056 1059 except:
1057 1060 apply(handler, sys.exc_info() )
1058 1061 print ''
1059 1062
@@ -1,32 +1,32
1 include README_Windows.txt
2 include win32_manual_post_install.py
3 1 include ipython.py
4 2 include setupbase.py
3 include setupegg.py
5 4
6 5 graft scripts
7 6
8 7 graft setupext
9 8
10 9 graft IPython/UserConfig
11 10
12 11 graft IPython/kernel
13 12 graft IPython/config
14 13 graft IPython/testing
15 14 graft IPython/tools
16 15
17 16 recursive-include IPython/Extensions igrid_help*
18 17
19 18 graft docs
20 19 exclude docs/\#*
21 20 exclude docs/man/*.1
22 21
23 22 # docs subdirs we want to skip
24 23 prune docs/attic
25 24 prune docs/build
26 25
27 26 global-exclude *~
28 27 global-exclude *.flc
29 28 global-exclude *.pyc
30 29 global-exclude .dircopy.log
31 30 global-exclude .svn
32 31 global-exclude .bzr
32 global-exclude .hgignore
@@ -1,11 +1,11
1 ===============
2 IPython1 README
3 ===============
4
5 .. contents::
1 ==============
2 IPython README
3 ==============
6 4
7 5 Overview
8 6 ========
9 7
10 Welcome to IPython. New users should consult our documentation, which can be found
11 in the docs/source subdirectory.
8 Welcome to IPython. Our documentation can be found in the docs/source
9 subdirectory. We also have ``.html`` and ``.pdf`` versions of this
10 documentation available on the IPython `website <http://ipython.scipy.org>`_.
11
@@ -1,87 +1,86
1 1 # Makefile for Sphinx documentation
2 2 #
3 3
4 4 # You can set these variables from the command line.
5 5 SPHINXOPTS =
6 6 SPHINXBUILD = sphinx-build
7 7 PAPER =
8 8
9 9 # Internal variables.
10 10 PAPEROPT_a4 = -D latex_paper_size=a4
11 11 PAPEROPT_letter = -D latex_paper_size=letter
12 12 ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
13 13
14 14 .PHONY: help clean html web pickle htmlhelp latex changes linkcheck
15 15
16 16 help:
17 17 @echo "Please use \`make <target>' where <target> is one of"
18 18 @echo " html to make standalone HTML files"
19 19 @echo " pickle to make pickle files (usable by e.g. sphinx-web)"
20 20 @echo " htmlhelp to make HTML files and a HTML help project"
21 21 @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
22 22 @echo " changes to make an overview over all changed/added/deprecated items"
23 23 @echo " linkcheck to check all external links for integrity"
24 24 @echo
25 25 @echo "Compound utility targets:"
26 26 @echo "pdf latex and then runs the PDF generation"
27 27 @echo "all html and pdf"
28 28 @echo "dist all, and then puts the results in dist/"
29 29
30 30 clean:
31 -rm -rf build/*
31 -rm -rf build/* dist/*
32 32
33 33 pdf: latex
34 34 cd build/latex && make all-pdf
35 35
36 36 all: html pdf
37 37
38 dist: all
38 dist: clean all
39 39 mkdir -p dist
40 -rm -rf dist/*
41 ln build/latex/IPython.pdf dist/
40 ln build/latex/ipython.pdf dist/
42 41 cp -al build/html dist/
43 42 @echo "Build finished. Final docs are in dist/"
44 43
45 44 html:
46 45 mkdir -p build/html build/doctrees
47 46 $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html
48 47 @echo
49 48 @echo "Build finished. The HTML pages are in build/html."
50 49
51 50 pickle:
52 51 mkdir -p build/pickle build/doctrees
53 52 $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle
54 53 @echo
55 54 @echo "Build finished; now you can process the pickle files or run"
56 55 @echo " sphinx-web build/pickle"
57 56 @echo "to start the sphinx-web server."
58 57
59 58 web: pickle
60 59
61 60 htmlhelp:
62 61 mkdir -p build/htmlhelp build/doctrees
63 62 $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp
64 63 @echo
65 64 @echo "Build finished; now you can run HTML Help Workshop with the" \
66 65 ".hhp project file in build/htmlhelp."
67 66
68 67 latex:
69 68 mkdir -p build/latex build/doctrees
70 69 $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex
71 70 @echo
72 71 @echo "Build finished; the LaTeX files are in build/latex."
73 72 @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \
74 73 "run these through (pdf)latex."
75 74
76 75 changes:
77 76 mkdir -p build/changes build/doctrees
78 77 $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes
79 78 @echo
80 79 @echo "The overview file is in build/changes."
81 80
82 81 linkcheck:
83 82 mkdir -p build/linkcheck build/doctrees
84 83 $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck
85 84 @echo
86 85 @echo "Link check complete; look for any errors in the above output " \
87 86 "or in build/linkcheck/output.txt."
@@ -1,42 +1,43
1 1 import numpy as N
2 2 from math import *
3 3
4 4 class MCOptionPricer(object):
5 5 def __init__(self, S=100.0, K=100.0, sigma=0.25, r=0.05, days=260, paths=10000):
6 6 self.S = S
7 7 self.K = K
8 8 self.sigma = sigma
9 9 self.r = r
10 10 self.days = days
11 11 self.paths = paths
12 12 self.h = 1.0/self.days
13 13 self.const1 = exp((self.r-0.5*self.sigma**2)*self.h)
14 14 self.const2 = self.sigma*sqrt(self.h)
15 15
16 16 def run(self):
17 17 stock_price = self.S*N.ones(self.paths, dtype='float64')
18 18 stock_price_sum = N.zeros(self.paths, dtype='float64')
19 19 for j in range(self.days):
20 20 growth_factor = self.const1*N.exp(self.const2*N.random.standard_normal(self.paths))
21 21 stock_price = stock_price*growth_factor
22 22 stock_price_sum = stock_price_sum + stock_price
23 23 stock_price_avg = stock_price_sum/self.days
24 24 zeros = N.zeros(self.paths, dtype='float64')
25 25 r_factor = exp(-self.r*self.h*self.days)
26 26 self.vanilla_put = r_factor*N.mean(N.maximum(zeros,self.K-stock_price))
27 27 self.asian_put = r_factor*N.mean(N.maximum(zeros,self.K-stock_price_avg))
28 28 self.vanilla_call = r_factor*N.mean(N.maximum(zeros,stock_price-self.K))
29 29 self.asian_call = r_factor*N.mean(N.maximum(zeros,stock_price_avg-self.K))
30 30
31 31
32 32 def main():
33 33 op = MCOptionPricer()
34 34 op.run()
35 35 print "Vanilla Put Price = ", op.vanilla_put
36 36 print "Asian Put Price = ", op.asian_put
37 37 print "Vanilla Call Price = ", op.vanilla_call
38 38 print "Asian Call Price = ", op.asian_call
39 39
40 40
41 41 if __name__ == '__main__':
42 42 main()
43
@@ -1,120 +1,123
1 1 """Example showing how to merge multiple remote data streams.
2 2 """
3 3 # Slightly modified version of:
4 4 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/511509
5 5
6 6 import heapq
7 7 from IPython.kernel.error import CompositeError
8 8
9 9 def mergesort(list_of_lists, key=None):
10 10 """ Perform an N-way merge operation on sorted lists.
11 11
12 12 @param list_of_lists: (really iterable of iterable) of sorted elements
13 13 (either naturally or by C{key})
14 14 @param key: specify sort key function (like C{sort()}, C{sorted()})
15 15
16 16 Yields tuples of the form C{(item, iterator)}, where the iterator is the
17 17 built-in list iterator or something you pass in, if you pre-generate the
18 18 iterators.
19 19
20 20 This is a stable merge; complexity O(N lg N)
21 21
22 22 Examples::
23 23
24 24 >>> print list(mergesort([[1,2,3,4],
25 25 ... [2,3.25,3.75,4.5,6,7],
26 26 ... [2.625,3.625,6.625,9]]))
27 27 [1, 2, 2, 2.625, 3, 3.25, 3.625, 3.75, 4, 4.5, 6, 6.625, 7, 9]
28 28
29 29 # note stability
30 30 >>> print list(mergesort([[1,2,3,4],
31 31 ... [2,3.25,3.75,4.5,6,7],
32 32 ... [2.625,3.625,6.625,9]],
33 33 ... key=int))
34 34 [1, 2, 2, 2.625, 3, 3.25, 3.75, 3.625, 4, 4.5, 6, 6.625, 7, 9]
35 35
36 36
37 37 >>> print list(mergesort([[4, 3, 2, 1],
38 38 ... [7, 6, 4.5, 3.75, 3.25, 2],
39 39 ... [9, 6.625, 3.625, 2.625]],
40 40 ... key=lambda x: -x))
41 41 [9, 7, 6.625, 6, 4.5, 4, 3.75, 3.625, 3.25, 3, 2.625, 2, 2, 1]
42 42 """
43 43
44 44 heap = []
45 45 for i, itr in enumerate(iter(pl) for pl in list_of_lists):
46 46 try:
47 47 item = itr.next()
48 toadd = (key(item), i, item, itr) if key else (item, i, itr)
48 if key:
49 toadd = (key(item), i, item, itr)
50 else:
51 toadd = (item, i, itr)
49 52 heap.append(toadd)
50 53 except StopIteration:
51 54 pass
52 55 heapq.heapify(heap)
53 56
54 57 if key:
55 58 while heap:
56 59 _, idx, item, itr = heap[0]
57 60 yield item
58 61 try:
59 62 item = itr.next()
60 63 heapq.heapreplace(heap, (key(item), idx, item, itr) )
61 64 except StopIteration:
62 65 heapq.heappop(heap)
63 66
64 67 else:
65 68 while heap:
66 69 item, idx, itr = heap[0]
67 70 yield item
68 71 try:
69 72 heapq.heapreplace(heap, (itr.next(), idx, itr))
70 73 except StopIteration:
71 74 heapq.heappop(heap)
72 75
73 76
74 77 def remote_iterator(rc,engine,name):
75 78 """Return an iterator on an object living on a remote engine.
76 79 """
77 80 # Check that the object exists on the engine and pin a reference to it
78 81 iter_name = '_%s_rmt_iter_' % name
79 82 rc.execute('%s = iter(%s)' % (iter_name,name), targets=engine)
80 83 tpl = '_tmp = %s.next()' % iter_name
81 84 while True:
82 85 try:
83 86 rc.execute(tpl, targets=engine)
84 87 result = rc.pull('_tmp', targets=engine)[0]
85 88 # This causes the StopIteration exception to be raised.
86 89 except CompositeError, e:
87 90 e.raise_exception()
88 91 else:
89 92 yield result
90 93
91 94 # Main, interactive testing
92 95 if __name__ == '__main__':
93 96
94 97 from IPython.kernel import client
95 98 ipc = client.MultiEngineClient()
96 99 print 'Engine IDs:',ipc.get_ids()
97 100
98 101 # Make a set of 'sorted datasets'
99 102 a0 = range(5,20)
100 103 a1 = range(10)
101 104 a2 = range(15,25)
102 105
103 106 # Now, imagine these had been created in the remote engines by some long
104 107 # computation. In this simple example, we just send them over into the
105 108 # remote engines. They will all be called 'a' in each engine.
106 109 ipc.push(dict(a=a0), targets=0)
107 110 ipc.push(dict(a=a1), targets=1)
108 111 ipc.push(dict(a=a2), targets=2)
109 112
110 113 # And we now make a local object which represents the remote iterator
111 114 aa0 = remote_iterator(ipc,0,'a')
112 115 aa1 = remote_iterator(ipc,1,'a')
113 116 aa2 = remote_iterator(ipc,2,'a')
114 117
115 118 # Let's merge them, both locally and remotely:
116 119 print 'Merge the local datasets:'
117 120 print list(mergesort([a0,a1,a2]))
118 121
119 122 print 'Locally merge the remote sets:'
120 123 print list(mergesort([aa0,aa1,aa2]))
@@ -1,57 +1,57
1 1 #-------------------------------------------------------------------------------
2 2 # Driver code that the client runs.
3 3 #-------------------------------------------------------------------------------
4 4 # To run this code start a controller and engines using:
5 5 # ipcluster -n 2
6 6 # Then run the scripts by doing irunner rmt.ipy or by starting ipython and
7 7 # doing run rmt.ipy.
8 8
9 9 from rmtkernel import *
10 10 from IPython.kernel import client
11 11
12 12
13 13 def wignerDistribution(s):
14 14 """Returns (s, rho(s)) for the Wigner GOE distribution."""
15 15 return (numpy.pi*s/2.0) * numpy.exp(-numpy.pi*s**2/4.)
16 16
17 17
18 18 def generateWignerData():
19 19 s = numpy.linspace(0.0,4.0,400)
20 20 rhos = wignerDistribution(s)
21 21 return s, rhos
22 22
23 23
24 24 def serialDiffs(num, N):
25 25 diffs = ensembleDiffs(num, N)
26 26 normalizedDiffs = normalizeDiffs(diffs)
27 27 return normalizedDiffs
28 28
29 29
30 30 def parallelDiffs(rc, num, N):
31 31 nengines = len(rc.get_ids())
32 32 num_per_engine = num/nengines
33 33 print "Running with", num_per_engine, "per engine."
34 34 rc.push(dict(num_per_engine=num_per_engine, N=N))
35 35 rc.execute('diffs = ensembleDiffs(num_per_engine, N)')
36 36 # gather blocks always for now
37 37 pr = rc.gather('diffs')
38 38 return pr.r
39 39
40 40
41 41 # Main code
42 42 if __name__ == '__main__':
43 43 rc = client.MultiEngineClient()
44 44 print "Distributing code to engines..."
45 45 r = rc.run('rmtkernel.py')
46 46 rc.block = False
47 47
48 48 # Simulation parameters
49 49 nmats = 100
50 50 matsize = 30
51 51
52 52 %timeit -n1 -r1 serialDiffs(nmats,matsize)
53 53 %timeit -n1 -r1 parallelDiffs(rc, nmats, matsize)
54 54
55 55 # Uncomment these to plot the histogram
56 import pylab
57 pylab.hist(parallelDiffs(rc,matsize,matsize))
56 # import pylab
57 # pylab.hist(parallelDiffs(rc,matsize,matsize))
@@ -1,200 +1,393
1 1 .. _changes:
2 2
3 3 ==========
4 4 What's new
5 5 ==========
6 6
7 7 .. contents::
8 ..
9 1 Release 0.9.1
10 2 Release 0.9
11 2.1 New features
12 2.2 Bug fixes
13 2.3 Backwards incompatible changes
14 2.4 Changes merged in from IPython1
15 2.4.1 New features
16 2.4.2 Bug fixes
17 2.4.3 Backwards incompatible changes
18 3 Release 0.8.4
19 4 Release 0.8.3
20 5 Release 0.8.2
21 6 Older releases
22 ..
23
24 Release dev
25 ===========
26
27 New features
28 ------------
29
30 * The wonderful TextMate editor can now be used with %edit on OS X. Thanks
31 to Matt Foster for this patch.
32
33 * Fully refactored :command:`ipcluster` command line program for starting
34 IPython clusters. This new version is a complete rewrite and 1) is fully
35 cross platform (we now use Twisted's process management), 2) has much
36 improved performance, 3) uses subcommands for different types of clusters,
37 4) uses argparse for parsing command line options, 5) has better support
38 for starting clusters using :command:`mpirun`, 6) has experimental support
39 for starting engines using PBS. However, this new version of ipcluster
40 should be considered a technology preview. We plan on changing the API
41 in significant ways before it is final.
42
43 * The :mod:`argparse` module has been added to :mod:`IPython.external`.
44
45 * Full description of the security model added to the docs.
46
47 * cd completer: show bookmarks if no other completions are available.
48
49 * sh profile: easy way to give 'title' to prompt: assign to variable
50 '_prompt_title'. It looks like this::
51
52 [~]|1> _prompt_title = 'sudo!'
53 sudo![~]|2>
54
55 * %edit: If you do '%edit pasted_block', pasted_block
56 variable gets updated with new data (so repeated
57 editing makes sense)
58
59 Bug fixes
60 ---------
61
62 * The ipengine and ipcontroller scripts now handle missing furl files
63 more gracefully by giving better error messages.
64
65 * %rehashx: Aliases no longer contain dots. python3.0 binary
66 will create alias python30. Fixes:
67 #259716 "commands with dots in them don't work"
68
69 * %cpaste: %cpaste -r repeats the last pasted block.
70 The block is assigned to pasted_block even if the code
71 raises an exception.
72
73 Backwards incompatible changes
74 ------------------------------
75
76 * The controller now has a ``-r`` flag that needs to be used if you want to
77 reuse existing furl files. Otherwise they are deleted (the default).
78
79 * Remove ipy_leo.py. "easy_install ipython-extension" to get it.
80 (done to decouple it from ipython release cycle)
81
82
83
84 Release 0.9.1
85 =============
86
87 This release was quickly made to restore compatibility with Python 2.4, which
88 version 0.9 accidentally broke. No new features were introduced, other than
89 some additional testing support for internal use.
90
8 91
9 92 Release 0.9
10 93 ===========
11 94
12 95 New features
13 96 ------------
14 97
15 * The notion of a task has been completely reworked. An `ITask` interface has
16 been created. This interface defines the methods that tasks need to implement.
17 These methods are now responsible for things like submitting tasks and processing
18 results. There are two basic task types: :class:`IPython.kernel.task.StringTask`
19 (this is the old `Task` object, but renamed) and the new
20 :class:`IPython.kernel.task.MapTask`, which is based on a function.
21 * A new interface, :class:`IPython.kernel.mapper.IMapper` has been defined to
22 standardize the idea of a `map` method. This interface has a single
23 `map` method that has the same syntax as the built-in `map`. We have also defined
24 a `mapper` factory interface that creates objects that implement
25 :class:`IPython.kernel.mapper.IMapper` for different controllers. Both
26 the multiengine and task controller now have mapping capabilties.
27 * The parallel function capabilities have been reworks. The major changes are that
28 i) there is now an `@parallel` magic that creates parallel functions, ii)
29 the syntax for mulitple variable follows that of `map`, iii) both the
30 multiengine and task controller now have a parallel function implementation.
31 * All of the parallel computing capabilities from `ipython1-dev` have been merged into
32 IPython proper. This resulted in the following new subpackages:
33 :mod:`IPython.kernel`, :mod:`IPython.kernel.core`, :mod:`IPython.config`,
34 :mod:`IPython.tools` and :mod:`IPython.testing`.
35 * As part of merging in the `ipython1-dev` stuff, the `setup.py` script and friends
36 have been completely refactored. Now we are checking for dependencies using
37 the approach that matplotlib uses.
38 * The documentation has been completely reorganized to accept the documentation
39 from `ipython1-dev`.
40 * We have switched to using Foolscap for all of our network protocols in
41 :mod:`IPython.kernel`. This gives us secure connections that are both encrypted
42 and authenticated.
43 * We have a brand new `COPYING.txt` files that describes the IPython license
44 and copyright. The biggest change is that we are putting "The IPython
45 Development Team" as the copyright holder. We give more details about exactly
46 what this means in this file. All developer should read this and use the new
47 banner in all IPython source code files.
48 * sh profile: ./foo runs foo as system command, no need to do !./foo anymore
49 * String lists now support 'sort(field, nums = True)' method (to easily
50 sort system command output). Try it with 'a = !ls -l ; a.sort(1, nums=1)'
51 * '%cpaste foo' now assigns the pasted block as string list, instead of string
52 * The ipcluster script now run by default with no security. This is done because
53 the main usage of the script is for starting things on localhost. Eventually
54 when ipcluster is able to start things on other hosts, we will put security
55 back.
56 * 'cd --foo' searches directory history for string foo, and jumps to that dir.
57 Last part of dir name is checked first. If no matches for that are found,
58 look at the whole path.
98 * All furl files and security certificates are now put in a read-only
99 directory named ~/.ipython/security.
100
101 * A single function :func:`get_ipython_dir`, in :mod:`IPython.genutils` that
102 determines the user's IPython directory in a robust manner.
103
104 * Laurent's WX application has been given a top-level script called
105 ipython-wx, and it has received numerous fixes. We expect this code to be
106 architecturally better integrated with Gael's WX 'ipython widget' over the
107 next few releases.
108
109 * The Editor synchronization work by Vivian De Smedt has been merged in. This
110 code adds a number of new editor hooks to synchronize with editors under
111 Windows.
112
113 * A new, still experimental but highly functional, WX shell by Gael Varoquaux.
114 This work was sponsored by Enthought, and while it's still very new, it is
115 based on a more cleanly organized architecture of the various IPython
116 components. We will continue to develop this over the next few releases as a
117 model for GUI components that use IPython.
118
119 * Another GUI frontend, Cocoa based (Cocoa is the OSX native GUI framework),
120 authored by Barry Wark. Currently the WX and the Cocoa ones have slightly
121 different internal organizations, but the whole team is working on finding
122 what the right abstraction points are for a unified codebase.
123
124 * As part of the frontend work, Barry Wark also implemented an experimental
125 event notification system that various ipython components can use. In the
126 next release the implications and use patterns of this system regarding the
127 various GUI options will be worked out.
128
129 * IPython finally has a full test system, that can test docstrings with
130 IPython-specific functionality. There are still a few pieces missing for it
131 to be widely accessible to all users (so they can run the test suite at any
132 time and report problems), but it now works for the developers. We are
133 working hard on continuing to improve it, as this was probably IPython's
134 major Achilles heel (the lack of proper test coverage made it effectively
135 impossible to do large-scale refactoring). The full test suite can now
136 be run using the :command:`iptest` command line program.
137
138 * The notion of a task has been completely reworked. An `ITask` interface has
139 been created. This interface defines the methods that tasks need to
140 implement. These methods are now responsible for things like submitting
141 tasks and processing results. There are two basic task types:
142 :class:`IPython.kernel.task.StringTask` (this is the old `Task` object, but
143 renamed) and the new :class:`IPython.kernel.task.MapTask`, which is based on
144 a function.
145
146 * A new interface, :class:`IPython.kernel.mapper.IMapper` has been defined to
147 standardize the idea of a `map` method. This interface has a single `map`
148 method that has the same syntax as the built-in `map`. We have also defined
149 a `mapper` factory interface that creates objects that implement
150 :class:`IPython.kernel.mapper.IMapper` for different controllers. Both the
151 multiengine and task controller now have mapping capabilities.
152
153 * The parallel function capabilities have been reworked. The major changes are
154 that i) there is now an `@parallel` magic that creates parallel functions,
155 ii) the syntax for multiple variables follows that of `map`, iii) both the
156 multiengine and task controller now have a parallel function implementation.
59 157
158 * All of the parallel computing capabilities from `ipython1-dev` have been
159 merged into IPython proper. This resulted in the following new subpackages:
160 :mod:`IPython.kernel`, :mod:`IPython.kernel.core`, :mod:`IPython.config`,
161 :mod:`IPython.tools` and :mod:`IPython.testing`.
162
163 * As part of merging in the `ipython1-dev` stuff, the `setup.py` script and
164 friends have been completely refactored. Now we are checking for
165 dependencies using the approach that matplotlib uses.
166
167 * The documentation has been completely reorganized to accept the
168 documentation from `ipython1-dev`.
169
170 * We have switched to using Foolscap for all of our network protocols in
171 :mod:`IPython.kernel`. This gives us secure connections that are both
172 encrypted and authenticated.
173
174 * We have a brand new `COPYING.txt` file that describes the IPython license
175 and copyright. The biggest change is that we are putting "The IPython
176 Development Team" as the copyright holder. We give more details about
177 exactly what this means in this file. All developer should read this and use
178 exactly what this means in this file. All developers should read this and use
179
180 * sh profile: ./foo runs foo as system command, no need to do !./foo anymore
181
182 * String lists now support ``sort(field, nums = True)`` method (to easily sort
183 system command output). Try it with ``a = !ls -l ; a.sort(1, nums=1)``.
184
185 * '%cpaste foo' now assigns the pasted block as string list, instead of string
186
187 * The ipcluster script now runs by default with no security. This is done
188 because the main usage of the script is for starting things on localhost.
189 Eventually when ipcluster is able to start things on other hosts, we will put
190 security back.
191
192 * 'cd --foo' searches directory history for string foo, and jumps to that dir.
193 Last part of dir name is checked first. If no matches for that are found,
194 look at the whole path.
195
196
60 197 Bug fixes
61 198 ---------
62 199
63 * The colors escapes in the multiengine client are now turned off on win32 as they
64 don't print correctly.
65 * The :mod:`IPython.kernel.scripts.ipengine` script was exec'ing mpi_import_statement
66 incorrectly, which was leading the engine to crash when mpi was enabled.
67 * A few subpackages has missing `__init__.py` files.
68 * The documentation is only created is Sphinx is found. Previously, the `setup.py`
69 script would fail if it was missing.
70 * Greedy 'cd' completion has been disabled again (it was enabled in 0.8.4)
200 * The Windows installer has been fixed. Now all IPython scripts have ``.bat``
201 versions created. Also, the Start Menu shortcuts have been updated.
202
203 * The color escapes in the multiengine client are now turned off on win32 as
204 they don't print correctly.
205
206 * The :mod:`IPython.kernel.scripts.ipengine` script was exec'ing
207 mpi_import_statement incorrectly, which was leading the engine to crash when
208 mpi was enabled.
209
210 * A few subpackages had missing ``__init__.py`` files.
211
212 * The documentation is only created if Sphinx is found. Previously, the
213 ``setup.py`` script would fail if it was missing.
214
215 * Greedy ``cd`` completion has been disabled again (it was enabled in 0.8.4) as
216 it caused problems on certain platforms.
71 217
72 218
73 219 Backwards incompatible changes
74 220 ------------------------------
75 221
76 * :class:`IPython.kernel.client.Task` has been renamed
77 :class:`IPython.kernel.client.StringTask` to make way for new task types.
78 * The keyword argument `style` has been renamed `dist` in `scatter`, `gather`
79 and `map`.
80 * Renamed the values that the rename `dist` keyword argument can have from
81 `'basic'` to `'b'`.
82 * IPython has a larger set of dependencies if you want all of its capabilities.
83 See the `setup.py` script for details.
84 * The constructors for :class:`IPython.kernel.client.MultiEngineClient` and
85 :class:`IPython.kernel.client.TaskClient` no longer take the (ip,port) tuple.
86 Instead they take the filename of a file that contains the FURL for that
87 client. If the FURL file is in your IPYTHONDIR, it will be found automatically
88 and the constructor can be left empty.
89 * The asynchronous clients in :mod:`IPython.kernel.asyncclient` are now created
90 using the factory functions :func:`get_multiengine_client` and
91 :func:`get_task_client`. These return a `Deferred` to the actual client.
92 * The command line options to `ipcontroller` and `ipengine` have changed to
93 reflect the new Foolscap network protocol and the FURL files. Please see the
94 help for these scripts for details.
95 * The configuration files for the kernel have changed because of the Foolscap stuff.
96 If you were using custom config files before, you should delete them and regenerate
97 new ones.
222 * The ``clusterfile`` option of the :command:`ipcluster` command has been
223 removed as it was not working and it will be replaced soon by something much
224 more robust.
225
226 * The :mod:`IPython.kernel` configuration now properly finds the user's
227 IPython directory.
228
229 * In ipapi, the :func:`make_user_ns` function has been replaced with
230 :func:`make_user_namespaces`, to support dict subclasses in namespace
231 creation.
232
233 * :class:`IPython.kernel.client.Task` has been renamed
234 :class:`IPython.kernel.client.StringTask` to make way for new task types.
235
236 * The keyword argument `style` has been renamed `dist` in `scatter`, `gather`
237 and `map`.
238
239 * Renamed the values that the renamed `dist` keyword argument can have from
240 `'basic'` to `'b'`.
241
242 * IPython has a larger set of dependencies if you want all of its capabilities.
243 See the `setup.py` script for details.
244
245 * The constructors for :class:`IPython.kernel.client.MultiEngineClient` and
246 :class:`IPython.kernel.client.TaskClient` no longer take the (ip,port) tuple.
247 Instead they take the filename of a file that contains the FURL for that
248 client. If the FURL file is in your IPYTHONDIR, it will be found automatically
249 and the constructor can be left empty (see the sketch after this list).
250
251 * The asynchronous clients in :mod:`IPython.kernel.asyncclient` are now created
252 using the factory functions :func:`get_multiengine_client` and
253 :func:`get_task_client`. These return a `Deferred` to the actual client.
254
255 * The command line options to `ipcontroller` and `ipengine` have changed to
256 reflect the new Foolscap network protocol and the FURL files. Please see the
257 help for these scripts for details.
258
259 * The configuration files for the kernel have changed because of the Foolscap
260 stuff. If you were using custom config files before, you should delete them
261 and regenerate new ones.
98 262
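A rough sketch of the FURL-based client construction described in the list
above (the FURL file names are illustrative; use whatever your controller
actually wrote out)::

    from IPython.kernel import client

    # found automatically if the FURL file lives in your IPYTHONDIR
    mec = client.MultiEngineClient()

    # or point explicitly at the controller's FURL files
    mec = client.MultiEngineClient('ipcontroller-mec.furl')
    tc = client.TaskClient('ipcontroller-tc.furl')
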
99 263 Changes merged in from IPython1
100 264 -------------------------------
101 265
102 266 New features
103 267 ............
104 268
105 * Much improved ``setup.py`` and ``setupegg.py`` scripts. Because Twisted
106 and zope.interface are now easy installable, we can declare them as dependencies
107 in our setupegg.py script.
108 * IPython is now compatible with Twisted 2.5.0 and 8.x.
109 * Added a new example of how to use :mod:`ipython1.kernel.asynclient`.
110 * Initial draft of a process daemon in :mod:`ipython1.daemon`. This has not
111 been merged into IPython and is still in `ipython1-dev`.
112 * The ``TaskController`` now has methods for getting the queue status.
113 * The ``TaskResult`` objects not have information about how long the task
114 took to run.
115 * We are attaching additional attributes to exceptions ``(_ipython_*)`` that
116 we use to carry additional info around.
117 * New top-level module :mod:`asyncclient` that has asynchronous versions (that
118 return deferreds) of the client classes. This is designed to users who want
119 to run their own Twisted reactor
120 * All the clients in :mod:`client` are now based on Twisted. This is done by
121 running the Twisted reactor in a separate thread and using the
122 :func:`blockingCallFromThread` function that is in recent versions of Twisted.
123 * Functions can now be pushed/pulled to/from engines using
124 :meth:`MultiEngineClient.push_function` and :meth:`MultiEngineClient.pull_function`.
125 * Gather/scatter are now implemented in the client to reduce the work load
126 of the controller and improve performance.
127 * Complete rewrite of the IPython docuementation. All of the documentation
128 from the IPython website has been moved into docs/source as restructured
129 text documents. PDF and HTML documentation are being generated using
130 Sphinx.
131 * New developer oriented documentation: development guidelines and roadmap.
132 * Traditional ``ChangeLog`` has been changed to a more useful ``changes.txt`` file
133 that is organized by release and is meant to provide something more relevant
134 for users.
269 * Much improved ``setup.py`` and ``setupegg.py`` scripts. Because Twisted and
270   zope.interface are now easily installable, we can declare them as dependencies
271 in our setupegg.py script.
272
273 * IPython is now compatible with Twisted 2.5.0 and 8.x.
274
275 * Added a new example of how to use :mod:`ipython1.kernel.asynclient`.
276
277 * Initial draft of a process daemon in :mod:`ipython1.daemon`. This has not
278 been merged into IPython and is still in `ipython1-dev`.
279
280 * The ``TaskController`` now has methods for getting the queue status.
281
282 * The ``TaskResult`` objects now have information about how long the task
283 took to run.
284
285 * We are attaching additional attributes to exceptions ``(_ipython_*)`` that
286 we use to carry additional info around.
287
288 * New top-level module :mod:`asyncclient` that has asynchronous versions (that
289   return deferreds) of the client classes. This is designed for users who want
290 to run their own Twisted reactor.
291
292 * All the clients in :mod:`client` are now based on Twisted. This is done by
293 running the Twisted reactor in a separate thread and using the
294 :func:`blockingCallFromThread` function that is in recent versions of Twisted.
295
296 * Functions can now be pushed/pulled to/from engines using
297 :meth:`MultiEngineClient.push_function` and
298 :meth:`MultiEngineClient.pull_function`.
299
300 * Gather/scatter are now implemented in the client to reduce the work load
301   of the controller and improve performance (see the sketch after this list).
302
303 * Complete rewrite of the IPython documentation. All of the documentation
304 from the IPython website has been moved into docs/source as restructured
305 text documents. PDF and HTML documentation are being generated using
306 Sphinx.
307
308 * New developer oriented documentation: development guidelines and roadmap.
309
310 * Traditional ``ChangeLog`` has been changed to a more useful ``changes.txt``
311 file that is organized by release and is meant to provide something more
312 relevant for users.
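
To make the client-side features above more concrete, here is a short,
illustrative sketch; it assumes ``mec`` is a connected
:class:`MultiEngineClient` (see the FURL example earlier in this document)::

    def square(x):
        return x * x

    # Functions can now be pushed to and pulled from the engines.
    mec.push_function(dict(square=square))
    mec.execute('y = square(4)')

    # Scatter/gather partition a sequence across the engines and reassemble
    # it; the partitioning is done client side to lighten the controller's
    # load. 'b' is the renamed 'basic' distribution.
    mec.scatter('data', range(16), dist='b')
    mec.execute('data = [square(x) for x in data]')
    result = mec.gather('data')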
135 313
136 314 Bug fixes
137 315 .........
138 316
139 * Created a proper ``MANIFEST.in`` file to create source distributions.
140 * Fixed a bug in the ``MultiEngine`` interface. Previously, multi-engine
141 actions were being collected with a :class:`DeferredList` with
142 ``fireononeerrback=1``. This meant that methods were returning
143 before all engines had given their results. This was causing extremely odd
144 bugs in certain cases. To fix this problem, we have 1) set
145 ``fireononeerrback=0`` to make sure all results (or exceptions) are in
146 before returning and 2) introduced a :exc:`CompositeError` exception
147 that wraps all of the engine exceptions. This is a huge change as it means
148 that users will have to catch :exc:`CompositeError` rather than the actual
149 exception.
317 * Created a proper ``MANIFEST.in`` file to create source distributions.
318
319 * Fixed a bug in the ``MultiEngine`` interface. Previously, multi-engine
320 actions were being collected with a :class:`DeferredList` with
321 ``fireononeerrback=1``. This meant that methods were returning
322 before all engines had given their results. This was causing extremely odd
323 bugs in certain cases. To fix this problem, we have 1) set
324 ``fireononeerrback=0`` to make sure all results (or exceptions) are in
325 before returning and 2) introduced a :exc:`CompositeError` exception
326 that wraps all of the engine exceptions. This is a huge change as it means
327 that users will have to catch :exc:`CompositeError` rather than the actual
328 exception.
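
Since this changes how user code must handle engine errors, here is a brief,
illustrative sketch of the new pattern. It assumes a connected
:class:`MultiEngineClient` ``mec``; the import location of
:exc:`CompositeError` is an assumption of the example::

    from IPython.kernel.error import CompositeError  # assumed location

    try:
        mec.execute('1/0')   # raises ZeroDivisionError on every engine
    except CompositeError, e:
        # The composite wraps the individual engine exceptions; inspect it
        # (or re-raise) as your application requires.
        print 'caught failures from the engines:', e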
150 329
151 330 Backwards incompatible changes
152 331 ..............................
153 332
154 * All names have been renamed to conform to the lowercase_with_underscore
155 convention. This will require users to change references to all names like
156 ``queueStatus`` to ``queue_status``.
157 * Previously, methods like :meth:`MultiEngineClient.push` and
158   :meth:`MultiEngineClient.pull` used ``*args`` and ``**kwargs``. This was
159 becoming a problem as we weren't able to introduce new keyword arguments into
160   the API. Now these methods simply take a dict or sequence. This has also allowed
161 us to get rid of the ``*All`` methods like :meth:`pushAll` and :meth:`pullAll`.
162 These things are now handled with the ``targets`` keyword argument that defaults
163 to ``'all'``.
164 * The :attr:`MultiEngineClient.magicTargets` has been renamed to
165 :attr:`MultiEngineClient.targets`.
166 * All methods in the MultiEngine interface now accept the optional keyword argument
167 ``block``.
168 * Renamed :class:`RemoteController` to :class:`MultiEngineClient` and
169 :class:`TaskController` to :class:`TaskClient`.
170 * Renamed the top-level module from :mod:`api` to :mod:`client`.
171 * Most methods in the multiengine interface now raise a :exc:`CompositeError` exception
172 that wraps the user's exceptions, rather than just raising the raw user's exception.
173 * Changed the ``setupNS`` and ``resultNames`` in the ``Task`` class to ``push``
174 and ``pull``.
333 * All names have been renamed to conform to the lowercase_with_underscore
334 convention. This will require users to change references to all names like
335 ``queueStatus`` to ``queue_status``.
336
337 * Previously, methods like :meth:`MultiEngineClient.push` and
338   :meth:`MultiEngineClient.pull` used ``*args`` and ``**kwargs``. This was
339 becoming a problem as we weren't able to introduce new keyword arguments into
340   the API. Now these methods simply take a dict or sequence. This has also
341 allowed us to get rid of the ``*All`` methods like :meth:`pushAll` and
342 :meth:`pullAll`. These things are now handled with the ``targets`` keyword
343   argument that defaults to ``'all'`` (see the sketch after this list).
344
345 * The :attr:`MultiEngineClient.magicTargets` has been renamed to
346 :attr:`MultiEngineClient.targets`.
347
348 * All methods in the MultiEngine interface now accept the optional keyword
349 argument ``block``.
350
351 * Renamed :class:`RemoteController` to :class:`MultiEngineClient` and
352 :class:`TaskController` to :class:`TaskClient`.
353
354 * Renamed the top-level module from :mod:`api` to :mod:`client`.
175 355
356 * Most methods in the multiengine interface now raise a :exc:`CompositeError`
357 exception that wraps the user's exceptions, rather than just raising the raw
358 user's exception.
359
360 * Changed the ``setupNS`` and ``resultNames`` in the ``Task`` class to ``push``
361 and ``pull``.
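
A short sketch tying the calling convention changes above together; it is
illustrative only and again assumes a connected :class:`MultiEngineClient`
``mec``::

    # push now takes a dict and pull a name or sequence of names; the old
    # *All methods are replaced by the targets keyword (default 'all').
    mec.push(dict(a=1, b=2))
    mec.push(dict(a=10), targets=[0, 1])
    values = mec.pull(('a', 'b'))

    # All MultiEngine methods also accept the optional block keyword.
    mec.execute('c = a + b', block=True)

    # And camelCase names are gone, e.g. queueStatus -> queue_status.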
362
363
176 364 Release 0.8.4
177 365 =============
178 366
179 Someone needs to describe what went into 0.8.4.
367 This was a quick release to fix an unfortunate bug that slipped into the 0.8.3
368 release. The ``--twisted`` option was disabled, as it turned out to be broken
369 across several platforms.
180 370
181 Release 0.8.2
182 =============
183 371
184 * %pushd/%popd behave differently; now "pushd /foo" pushes CURRENT directory
185 and jumps to /foo. The current behaviour is closer to the documented
186 behaviour, and should not trip anyone.
187
188 372 Release 0.8.3
189 373 =============
190 374
191 375 * pydb is now disabled by default (due to %run -d problems). You can enable
192 376   it by passing the -pydb command line argument to IPython. Note that setting
193 377   it in the config file won't work.
194 378
379
380 Release 0.8.2
381 =============
382
383 * %pushd/%popd behave differently; now "pushd /foo" pushes CURRENT directory
384 and jumps to /foo. The current behaviour is closer to the documented
385 behaviour, and should not trip anyone.
386
387
195 388 Older releases
196 389 ==============
197 390
198 Changes in earlier releases of IPython are described in the older file ``ChangeLog``.
199 Please refer to this document for details.
391 Changes in earlier releases of IPython are described in the older file
392 ``ChangeLog``. Please refer to this document for details.
200 393
@@ -1,166 +1,187
1 1 # -*- coding: utf-8 -*-
2 2 #
3 # IPython documentation build configuration file, created by
4 # sphinx-quickstart on Thu May 8 16:45:02 2008.
3 # IPython documentation build configuration file.
4
5 # NOTE: This file has been edited manually from the auto-generated one from
6 # sphinx. Do NOT delete and re-generate. If any changes from sphinx are
7 # needed, generate a scratch one and merge by hand any new fields needed.
8
5 9 #
6 10 # This file is execfile()d with the current directory set to its containing dir.
7 11 #
8 12 # The contents of this file are pickled, so don't put values in the namespace
9 13 # that aren't pickleable (module imports are okay, they're removed automatically).
10 14 #
11 15 # All configuration values have a default value; values that are commented out
12 16 # serve to show the default value.
13 17
14 18 import sys, os
15 19
16 20 # If your extensions are in another directory, add it here. If the directory
17 21 # is relative to the documentation root, use os.path.abspath to make it
18 22 # absolute, like shown here.
19 #sys.path.append(os.path.abspath('some/directory'))
23 sys.path.append(os.path.abspath('../sphinxext'))
24
25 # Import support for ipython console session syntax highlighting (lives
26 # in the sphinxext directory defined above)
27 import ipython_console_highlighting
28
29 # We load the ipython release info into a dict by explicit execution
30 iprelease = {}
31 execfile('../../IPython/Release.py',iprelease)
20 32
21 33 # General configuration
22 34 # ---------------------
23 35
24 36 # Add any Sphinx extension module names here, as strings. They can be extensions
25 37 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
26 #extensions = []
38 extensions = ['sphinx.ext.autodoc',
39 'inheritance_diagram', 'only_directives',
40 'ipython_console_highlighting',
41 # 'plot_directive', # disabled for now, needs matplotlib
42 ]
27 43
28 44 # Add any paths that contain templates here, relative to this directory.
29 45 templates_path = ['_templates']
30 46
31 47 # The suffix of source filenames.
32 48 source_suffix = '.txt'
33 49
34 50 # The master toctree document.
35 51 master_doc = 'index'
36 52
37 53 # General substitutions.
38 54 project = 'IPython'
39 55 copyright = '2008, The IPython Development Team'
40 56
41 57 # The default replacements for |version| and |release|, also used in various
42 58 # other places throughout the built documents.
43 59 #
44 # The short X.Y version.
45 version = '0.9'
46 60 # The full version, including alpha/beta/rc tags.
47 release = '0.9.beta1'
61 release = iprelease['version']
62 # The short X.Y version.
63 version = '.'.join(release.split('.',2)[:2])
64
48 65
49 66 # There are two options for replacing |today|: either, you set today to some
50 67 # non-false value, then it is used:
51 68 #today = ''
52 69 # Else, today_fmt is used as the format for a strftime call.
53 70 today_fmt = '%B %d, %Y'
54 71
55 72 # List of documents that shouldn't be included in the build.
56 73 #unused_docs = []
57 74
58 75 # List of directories, relative to source directories, that shouldn't be searched
59 76 # for source files.
60 #exclude_dirs = []
77 exclude_dirs = ['attic']
61 78
62 79 # If true, '()' will be appended to :func: etc. cross-reference text.
63 80 #add_function_parentheses = True
64 81
65 82 # If true, the current module name will be prepended to all description
66 83 # unit titles (such as .. function::).
67 84 #add_module_names = True
68 85
69 86 # If true, sectionauthor and moduleauthor directives will be shown in the
70 87 # output. They are ignored by default.
71 88 #show_authors = False
72 89
73 90 # The name of the Pygments (syntax highlighting) style to use.
74 91 pygments_style = 'sphinx'
75 92
76 93
77 94 # Options for HTML output
78 95 # -----------------------
79 96
80 97 # The style sheet to use for HTML and HTML Help pages. A file of that name
81 98 # must exist either in Sphinx' static/ path, or in one of the custom paths
82 99 # given in html_static_path.
83 100 html_style = 'default.css'
84 101
85 102 # The name for this set of Sphinx documents. If None, it defaults to
86 103 # "<project> v<release> documentation".
87 104 #html_title = None
88 105
89 106 # The name of an image file (within the static path) to place at the top of
90 107 # the sidebar.
91 108 #html_logo = None
92 109
93 110 # Add any paths that contain custom static files (such as style sheets) here,
94 111 # relative to this directory. They are copied after the builtin static files,
95 112 # so a file named "default.css" will overwrite the builtin "default.css".
96 113 html_static_path = ['_static']
97 114
98 115 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
99 116 # using the given strftime format.
100 117 html_last_updated_fmt = '%b %d, %Y'
101 118
102 119 # If true, SmartyPants will be used to convert quotes and dashes to
103 120 # typographically correct entities.
104 121 #html_use_smartypants = True
105 122
106 123 # Custom sidebar templates, maps document names to template names.
107 124 #html_sidebars = {}
108 125
109 126 # Additional templates that should be rendered to pages, maps page names to
110 127 # template names.
111 128 #html_additional_pages = {}
112 129
113 130 # If false, no module index is generated.
114 131 #html_use_modindex = True
115 132
116 133 # If true, the reST sources are included in the HTML build as _sources/<name>.
117 134 #html_copy_source = True
118 135
119 136 # If true, an OpenSearch description file will be output, and all pages will
120 137 # contain a <link> tag referring to it. The value of this option must be the
121 138 # base URL from which the finished HTML is served.
122 139 #html_use_opensearch = ''
123 140
124 141 # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
125 142 #html_file_suffix = ''
126 143
127 144 # Output file base name for HTML help builder.
128 htmlhelp_basename = 'IPythondoc'
145 htmlhelp_basename = 'ipythondoc'
129 146
130 147
131 148 # Options for LaTeX output
132 149 # ------------------------
133 150
134 151 # The paper size ('letter' or 'a4').
135 152 latex_paper_size = 'letter'
136 153
137 154 # The font size ('10pt', '11pt' or '12pt').
138 155 latex_font_size = '11pt'
139 156
140 157 # Grouping the document tree into LaTeX files. List of tuples
141 158 # (source start file, target name, title, author, document class [howto/manual]).
142 159
143 latex_documents = [ ('index', 'IPython.tex', 'IPython Documentation',
144 ur"""Brian Granger, Fernando Pérez and Ville Vainio\\
145 \ \\
146 With contributions from:\\
147 Benjamin Ragan-Kelley.""",
160 latex_documents = [ ('index', 'ipython.tex', 'IPython Documentation',
161 ur"""The IPython Development Team""",
148 162 'manual'),
149 163 ]
150 164
151 165 # The name of an image file (relative to this directory) to place at the top of
152 166 # the title page.
153 167 #latex_logo = None
154 168
155 169 # For "manual" documents, if this is true, then toplevel headings are parts,
156 170 # not chapters.
157 171 #latex_use_parts = False
158 172
159 173 # Additional stuff for the LaTeX preamble.
160 174 #latex_preamble = ''
161 175
162 176 # Documents to append as an appendix to all manuals.
163 177 #latex_appendices = []
164 178
165 179 # If false, no module index is generated.
166 180 #latex_use_modindex = True
181
182
183 # Cleanup
184 # -------
185 # delete release info to avoid pickling errors from sphinx
186
187 del iprelease
@@ -1,284 +1,286
1 1 .. _customization:
2 2
3 3 ========================
4 4 Customization of IPython
5 5 ========================
6 6
7 7 There are 2 ways to configure IPython - the old way of using ipythonrc
8 8 files (an INI-file like format), and the new way that involves editing
9 9 your ipy_user_conf.py. Both configuration systems work at the same
10 10 time, so you can set your options in both, but if you are hesitating
11 11 about which alternative to choose, we recommend the ipy_user_conf.py
12 12 approach, as it will give you more power and control in the long
13 13 run. However, there are a few options such as pylab_import_all that can
14 14 only be specified in ipythonrc file or command line - the reason for
15 15 this is that they are needed before IPython has been started up, and
16 16 the IPApi object used in ipy_user_conf.py is not yet available at that
17 17 time. A hybrid approach of specifying a few options in ipythonrc and
18 18 doing the more advanced configuration in ipy_user_conf.py is also
19 19 possible.
20 20
21 .. _ipythonrc:
22
21 23 The ipythonrc approach
22 24 ======================
23 25
24 26 As we've already mentioned, IPython reads a configuration file which can
25 27 be specified at the command line (-rcfile) or which by default is
26 28 assumed to be called ipythonrc. Such a file is looked for in the current
27 29 directory where IPython is started and then in your IPYTHONDIR, which
28 30 allows you to have local configuration files for specific projects. In
29 31 this section we will call these types of configuration files simply
30 32 rcfiles (short for resource configuration file).
31 33
32 34 The syntax of an rcfile is one of key-value pairs separated by
33 35 whitespace, one per line. Lines beginning with a # are ignored as
34 36 comments, but comments cannot be put on lines with data (the parser is
35 37 fairly primitive). Note that these are not python files, and this is
36 38 deliberate, because it allows us to do some things which would be quite
37 39 tricky to implement if they were normal python files.
38 40
39 First, an rcfile can contain permanent default values for almost all
40 command line options (except things like -help or -Version). Sec
41 `command line options`_ contains a description of all command-line
42 options. However, values you explicitly specify at the command line
43 override the values defined in the rcfile.
41 First, an rcfile can contain permanent default values for almost all command
42 line options (except things like -help or -Version). :ref:`This section
43 <command_line_options>` contains a description of all command-line
44 options. However, values you explicitly specify at the command line override
45 the values defined in the rcfile.
44 46
45 47 Besides command line option values, the rcfile can specify values for
46 48 certain extra special options which are not available at the command
47 49 line. These options are briefly described below.
48 50
49 51 Each of these options may appear as many times as you need it in the file; a short example follows the list.
50 52
51 53 * include <file1> <file2> ...: you can name other rcfiles you want
52 54 to recursively load up to 15 levels (don't use the <> brackets in
53 55 your names!). This feature allows you to define a 'base' rcfile
54 56 with general options and special-purpose files which can be loaded
55 57 only when needed with particular configuration options. To make
56 58 this more convenient, IPython accepts the -profile <name> option
57 59 (abbreviates to -p <name>) which tells it to look for an rcfile
58 60 named ipythonrc-<name>.
59 61 * import_mod <mod1> <mod2> ...: import modules with 'import
60 62 <mod1>,<mod2>,...'
61 63 * import_some <mod> <f1> <f2> ...: import functions with 'from
62 64 <mod> import <f1>,<f2>,...'
63 65 * import_all <mod1> <mod2> ...: for each module listed import
64 66 functions with ``from <mod> import *``.
65 67 * execute <python code>: give any single-line python code to be
66 68 executed.
67 69 * execfile <filename>: execute the python file given with an
68 70 'execfile(filename)' command. Username expansion is performed on
69 71 the given names. So if you need any amount of extra fancy
70 72 customization that won't fit in any of the above 'canned' options,
71 73 you can just put it in a separate python file and execute it.
72 74 * alias <alias_def>: this is equivalent to calling
73 75 '%alias <alias_def>' at the IPython command line. This way, from
74 76 within IPython you can do common system tasks without having to
75 77 exit it or use the ! escape. IPython isn't meant to be a shell
76 78 replacement, but it is often very useful to be able to do things
77 79 with files while testing code. This gives you the flexibility to
78 80 have within IPython any aliases you may be used to under your
79 81 normal system shell.
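
To make the options above concrete, here is a small, purely illustrative
rcfile fragment (the module names, file names and alias are invented for the
example)::

    # pull in another rcfile with shared options (up to 15 levels deep)
    include ipythonrc-common

    # imports performed at startup
    import_mod os sys
    import_some math sqrt pi
    import_all numpy

    # single-line code and whole files to execute
    execute x = 42
    execfile ~/.ipython/extras.py

    # shell-style aliases available at the IPython prompt
    alias ll ls -l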
80 82
81 83 ipy_user_conf.py
82 84 ================
83 85
84 86 There should be a simple template ipy_user_conf.py file in your
85 87 ~/.ipython directory. It is a plain python module that is imported
86 88 during IPython startup, so you can do pretty much what you want there
87 89 - import modules, configure extensions, change options, define magic
88 90 commands, put variables and functions in the IPython namespace,
89 91 etc. You use the IPython extension api object, acquired by
90 92 IPython.ipapi.get() and documented in the "IPython extension API"
91 93 chapter, to interact with IPython. A sample ipy_user_conf.py is listed
92 94 below for reference::
93 95
94 96 # Most of your config files and extensions will probably start
95 97 # with this import
96 98
97 99 import IPython.ipapi
98 100 ip = IPython.ipapi.get()
99 101
100 102 # You probably want to uncomment this if you did %upgrade -nolegacy
101 103 # import ipy_defaults
102 104
103 105 import os
104 106
105 107 def main():
106 108
107 109 #ip.dbg.debugmode = True
108 110 ip.dbg.debug_stack()
109 111
110 112 # uncomment if you want to get ipython -p sh behaviour
111 113 # without having to use command line switches
112 114 import ipy_profile_sh
113 115 import jobctrl
114 116
115 117 # Configure your favourite editor?
116 118 # Good idea e.g. for %edit os.path.isfile
117 119
118 120 #import ipy_editors
119 121
120 122 # Choose one of these:
121 123
122 124 #ipy_editors.scite()
123 125 #ipy_editors.scite('c:/opt/scite/scite.exe')
124 126 #ipy_editors.komodo()
125 127 #ipy_editors.idle()
126 128 # ... or many others, try 'ipy_editors??' after import to see them
127 129
128 130 # Or roll your own:
129 131 #ipy_editors.install_editor("c:/opt/jed +$line $file")
130 132
131 133
132 134 o = ip.options
133 135 # An example on how to set options
134 136 #o.autocall = 1
135 137 o.system_verbose = 0
136 138
137 139 #import_all("os sys")
138 140 #execf('~/_ipython/ns.py')
139 141
140 142
141 143 # -- prompt
142 144 # A different, more compact set of prompts from the default ones, that
143 145 # always show your current location in the filesystem:
144 146
145 147 #o.prompt_in1 = r'\C_LightBlue[\C_LightCyan\Y2\C_LightBlue]\C_Normal\n\C_Green|\#>'
146 148 #o.prompt_in2 = r'.\D: '
147 149 #o.prompt_out = r'[\#] '
148 150
149 151 # Try one of these color settings if you can't read the text easily
150 152 # autoexec is a list of IPython commands to execute on startup
151 153 #o.autoexec.append('%colors LightBG')
152 154 #o.autoexec.append('%colors NoColor')
153 155 o.autoexec.append('%colors Linux')
154 156
155 157
156 158 # some config helper functions you can use
157 159 def import_all(modules):
158 160 """ Usage: import_all("os sys") """
159 161 for m in modules.split():
160 162 ip.ex("from %s import *" % m)
161 163
162 164 def execf(fname):
163 165 """ Execute a file in user namespace """
164 166 ip.ex('execfile("%s")' % os.path.expanduser(fname))
165 167
166 168 main()
167 169
168 170 .. _Prompts:
169 171
170 172 Fine-tuning your prompt
171 173 =======================
172 174
173 175 IPython's prompts can be customized using a syntax similar to that of
174 176 the bash shell. Many of bash's escapes are supported, as well as a few
175 177 additional ones. We list them below::
176 178
177 179 \#
178 180 the prompt/history count number. This escape is automatically
179 181 wrapped in the coloring codes for the currently active color scheme.
180 182 \N
181 183 the 'naked' prompt/history count number: this is just the number
182 184 itself, without any coloring applied to it. This lets you produce
183 185 numbered prompts with your own colors.
184 186 \D
185 187 the prompt/history count, with the actual digits replaced by dots.
186 188 Used mainly in continuation prompts (prompt_in2)
187 189 \w
188 190 the current working directory
189 191 \W
190 192 the basename of current working directory
191 193 \Xn
192 194 where $n=0\ldots5.$ The current working directory, with $HOME
193 195 replaced by ~, and filtered out to contain only $n$ path elements
194 196 \Yn
195 197 Similar to \Xn, but with the $n+1$ element included if it is ~ (this
196 198 is similar to the behavior of the %cn escapes in tcsh)
197 199 \u
198 200 the username of the current user
199 201 \$
200 202 if the effective UID is 0, a #, otherwise a $
201 203 \h
202 204 the hostname up to the first '.'
203 205 \H
204 206 the hostname
205 207 \n
206 208 a newline
207 209 \r
208 210 a carriage return
209 211 \v
210 212 IPython version string
211 213
212 214 In addition to these, ANSI color escapes can be inserted into the
213 215 prompts, as \C_ColorName. The list of valid color names is: Black, Blue,
214 216 Brown, Cyan, DarkGray, Green, LightBlue, LightCyan, LightGray,
215 217 LightGreen, LightPurple, LightRed, NoColor, Normal, Purple, Red, White,
216 218 Yellow.
217 219
218 220 Finally, IPython supports the evaluation of arbitrary expressions in
219 221 your prompt string. The prompt strings are evaluated through the syntax
220 222 of PEP 215, but basically you can use $x.y to expand the value of x.y,
221 223 and for more complicated expressions you can use braces: ${foo()+x} will
222 224 call function foo and add to it the value of x, before putting the
223 225 result into your prompt. For example, using
224 226 prompt_in1 '${commands.getoutput("uptime")}\nIn [\#]: '
225 227 will print the result of the uptime command on each prompt (assuming the
226 228 commands module has been imported in your ipythonrc file).
227 229
228 230
229 231 Prompt examples
230 232
231 233 The following options in an ipythonrc file will give you IPython's
232 234 default prompts::
233 235
234 236 prompt_in1 'In [\#]:'
235 237 prompt_in2 ' .\D.:'
236 238 prompt_out 'Out[\#]:'
237 239
238 240 which look like this::
239 241
240 242 In [1]: 1+2
241 243 Out[1]: 3
242 244
243 245 In [2]: for i in (1,2,3):
244 246 ...: print i,
245 247 ...:
246 248 1 2 3
247 249
248 250 These will give you a very colorful prompt with path information::
249 251
250 252 #prompt_in1 '\C_Red\u\C_Blue[\C_Cyan\Y1\C_Blue]\C_LightGreen\#>'
251 253 prompt_in2 ' ..\D>'
252 254 prompt_out '<\#>'
253 255
254 256 which look like this::
255 257
256 258 fperez[~/ipython]1> 1+2
257 259 <1> 3
258 260 fperez[~/ipython]2> for i in (1,2,3):
259 261 ...> print i,
260 262 ...>
261 263 1 2 3
262 264
263 265
264 266 .. _Profiles:
265 267
266 268 IPython profiles
267 269 ================
268 270
269 As we already mentioned, IPython supports the -profile command-line
270 option (see sec. `command line options`_). A profile is nothing more
271 than a particular configuration file like your basic ipythonrc one,
272 but with particular customizations for a specific purpose. When you
273 start IPython with 'ipython -profile <name>', it assumes that in your
274 IPYTHONDIR there is a file called ipythonrc-<name> or
275 ipy_profile_<name>.py, and loads it instead of the normal ipythonrc.
271 As we already mentioned, IPython supports the -profile command-line option (see
272 :ref:`here <command_line_options>`). A profile is nothing more than a
273 particular configuration file like your basic ipythonrc one, but with
274 particular customizations for a specific purpose. When you start IPython with
275 'ipython -profile <name>', it assumes that in your IPYTHONDIR there is a file
276 called ipythonrc-<name> or ipy_profile_<name>.py, and loads it instead of the
277 normal ipythonrc.
276 278
277 279 This system allows you to maintain multiple configurations which load
278 280 modules, set options, define functions, etc. suitable for different
279 281 tasks and activate them in a very simple manner. In order to avoid
280 282 having to repeat all of your basic options (common things that don't
281 283 change such as your color preferences, for example), any profile can
282 284 include another configuration file. The most common way to use profiles
283 285 is then to have each one include your basic ipythonrc file as a starting
284 286 point, and then add further customizations. No newline at end of file
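
For instance (a purely illustrative example; the profile name and its contents
are invented), a profile for mathematical work could live in your IPYTHONDIR
as ``ipythonrc-math``::

    # start from your usual settings
    include ipythonrc

    # then add the task-specific customizations
    import_all math cmath
    execute print 'math profile loaded'

and would be activated with ``ipython -profile math`` (or, abbreviated,
``ipython -p math``).
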
@@ -1,10 +1,10
1 1 ===============================
2 2 Configuration and customization
3 3 ===============================
4 4
5 5 .. toctree::
6 :maxdepth: 1
6 :maxdepth: 2
7 7
8 8 initial_config.txt
9 9 customization.txt
10 10 new_config.txt
@@ -1,243 +1,244
1 1 .. _initial config:
2 2
3 3 =========================================
4 4 Initial configuration of your environment
5 5 =========================================
6 6
7 7 This section will help you set various things in your environment for
8 8 your IPython sessions to be as efficient as possible. All of IPython's
9 9 configuration information, along with several example files, is stored
10 10 in a directory named by default $HOME/.ipython. You can change this by
11 11 defining the environment variable IPYTHONDIR, or at runtime with the
12 12 command line option -ipythondir.
13 13
14 If all goes well, the first time you run IPython it should
15 automatically create a user copy of the config directory for you,
16 based on its builtin defaults. You can look at the files it creates to
17 learn more about configuring the system. The main file you will modify
18 to configure IPython's behavior is called ipythonrc (with a .ini
19 extension under Windows), included for reference in `ipythonrc`_
20 section. This file is very commented and has many variables you can
21 change to suit your taste, you can find more details in
22 Sec. customization_. Here we discuss the basic things you will want to
23 make sure things are working properly from the beginning.
14 If all goes well, the first time you run IPython it should automatically create
15 a user copy of the config directory for you, based on its builtin defaults. You
16 can look at the files it creates to learn more about configuring the
17 system. The main file you will modify to configure IPython's behavior is called
18 ipythonrc (with a .ini extension under Windows), included for reference
19 :ref:`here <ipythonrc>`. This file is heavily commented and has many variables you
20 can change to suit your taste; you can find more details :ref:`here
21 <customization>`. Here we discuss the basic settings you will want to check to
22 make sure things are working properly from the beginning.
24 23
25 24
26 .. _Accessing help:
25 .. _accessing_help:
27 26
28 27 Access to the Python help system
29 28 ================================
30 29
31 This is true for Python in general (not just for IPython): you should
32 have an environment variable called PYTHONDOCS pointing to the directory
33 where your HTML Python documentation lives. In my system it's
34 /usr/share/doc/python-docs-2.3.4/html, check your local details or ask
35 your systems administrator.
30 This is true for Python in general (not just for IPython): you should have an
31 environment variable called PYTHONDOCS pointing to the directory where your
32 HTML Python documentation lives. On my system it's
33 :file:`/usr/share/doc/python-doc/html`; check your local details or ask your
34 systems administrator.
36 35
37 36 This is the directory which holds the HTML version of the Python
38 37 manuals. Unfortunately it seems that different Linux distributions
39 38 package these files differently, so you may have to look around a bit.
40 39 Below I show the contents of this directory on my system for reference::
41 40
42 41 [html]> ls
43 about.dat acks.html dist/ ext/ index.html lib/ modindex.html
44 stdabout.dat tut/ about.html api/ doc/ icons/ inst/ mac/ ref/ style.css
42 about.html dist/ icons/ lib/ python2.5.devhelp.gz whatsnew/
43 acks.html doc/ index.html mac/ ref/
44 api/ ext/ inst/ modindex.html tut/
45 45
46 46 You should really make sure this variable is correctly set so that
47 47 Python's pydoc-based help system works. It is a powerful and convenient
48 48 system with full access to the Python manuals and all modules accessible
49 49 to you.
50 50
51 51 Under Windows it seems that pydoc finds the documentation automatically,
52 52 so no extra setup appears necessary.
53 53
54 54
55 55 Editor
56 56 ======
57 57
58 58 The %edit command (and its alias %ed) will invoke the editor set in your
59 59 environment as EDITOR. If this variable is not set, it will default to
60 60 vi under Linux/Unix and to notepad under Windows. You may want to set
61 61 this variable properly and to a lightweight editor which doesn't take
62 62 too long to start (that is, something other than a new instance of
63 63 Emacs). This way you can edit multi-line code quickly and with the power
64 64 of a real editor right inside IPython.
65 65
66 66 If you are a dedicated Emacs user, you should set up the Emacs server so
67 67 that new requests are handled by the original process. This means that
68 68 almost no time is spent in handling the request (assuming an Emacs
69 69 process is already running). For this to work, you need to set your
70 70 EDITOR environment variable to 'emacsclient'. The code below, supplied
71 71 by Francois Pinard, can then be used in your .emacs file to enable the
72 72 server::
73 73
74 74 (defvar server-buffer-clients)
75 75 (when (and (fboundp 'server-start) (string-equal (getenv "TERM") 'xterm))
76 76 (server-start)
77 77 (defun fp-kill-server-with-buffer-routine ()
78 78 (and server-buffer-clients (server-done)))
79 79 (add-hook 'kill-buffer-hook 'fp-kill-server-with-buffer-routine))
80 80
81 81 You can also set the value of this editor via the command-line option
82 82 '-editor' or in your ipythonrc file. This is useful if you wish to use an
83 83 editor specifically for IPython that differs from your typical default
84 84 (and for Windows users who tend to use fewer environment variables).
85 85
86 86
87 87 Color
88 88 =====
89 89
90 90 The default IPython configuration has most bells and whistles turned on
91 91 (they're pretty safe). But there's one that may cause problems on some
92 92 systems: the use of color on screen for displaying information. This is
93 93 very useful, since IPython can show prompts and exception tracebacks
94 94 with various colors, display syntax-highlighted source code, and in
95 95 general make it easier to visually parse information.
96 96
97 97 The following terminals seem to handle the color sequences fine:
98 98
99 99 * Linux main text console, KDE Konsole, Gnome Terminal, E-term,
100 100 rxvt, xterm.
101 101 * CDE terminal (tested under Solaris). This one boldfaces light colors.
102 102 * (X)Emacs buffers. See the emacs_ section for more details on
103 103 using IPython with (X)Emacs.
104 104 * A Windows (XP/2k) command prompt with pyreadline_.
105 105 * A Windows (XP/2k) CygWin shell. Although some users have reported
106 106 problems, it is not clear whether there is an issue for everyone
107 107 or only under specific configurations. If you have full color
108 108 support under cygwin, please post to the IPython mailing list so
109 109 this issue can be resolved for all users.
110 110
111 .. _pyreadline: https://code.launchpad.net/pyreadline
112
111 113 These have shown problems:
112 114
113 115 * Windows command prompt in WinXP/2k logged into a Linux machine via
114 116 telnet or ssh.
115 117 * Windows native command prompt in WinXP/2k, without Gary Bishop's
116 118 extensions. Once Gary's readline library is installed, the normal
117 119 WinXP/2k command prompt works perfectly.
118 120
119 121 Currently the following color schemes are available:
120 122
121 123 * NoColor: uses no color escapes at all (all escapes are empty '' ''
122 124 strings). This 'scheme' is thus fully safe to use in any terminal.
123 125 * Linux: works well in Linux console type environments: dark
124 126 background with light fonts. It uses bright colors for
125 127 information, so it is difficult to read if you have a light
126 128 colored background.
127 129 * LightBG: the basic colors are similar to those in the Linux scheme
128 130 but darker. It is easy to read in terminals with light backgrounds.
129 131
130 132 IPython uses colors for two main groups of things: prompts and
131 133 tracebacks which are directly printed to the terminal, and the object
132 134 introspection system which passes large sets of data through a pager.
133 135
134 136
135 137 Input/Output prompts and exception tracebacks
136 138 =============================================
137 139
138 140 You can test whether the colored prompts and tracebacks work on your
139 141 system interactively by typing '%colors Linux' at the prompt (use
140 142 '%colors LightBG' if your terminal has a light background). If the input
141 143 prompt shows garbage like::
142 144
143 145 [0;32mIn [[1;32m1[0;32m]: [0;00m
144 146
145 147 instead of (in color) something like::
146 148
147 149 In [1]:
148 150
149 151 this means that your terminal doesn't properly handle color escape
150 152 sequences. You can go to a 'no color' mode by typing '%colors NoColor'.
151 153
152 154 You can try using a different terminal emulator program (Emacs users,
153 155 see below). To permanently set your color preferences, edit the file
154 156 $HOME/.ipython/ipythonrc and set the colors option to the desired value.
155 157
156 158
157 159 Object details (types, docstrings, source code, etc.)
158 160 =====================================================
159 161
160 IPython has a set of special functions for studying the objects you
161 are working with, discussed in detail in Sec. `dynamic object
162 information`_. But this system relies on passing information which is
163 longer than your screen through a data pager, such as the common Unix
164 less and more programs. In order to be able to see this information in
165 color, your pager needs to be properly configured. I strongly
166 recommend using less instead of more, as it seems that more simply can
162 IPython has a set of special functions for studying the objects you are working
163 with, discussed in detail :ref:`here <dynamic_object_info>`. But this system
164 relies on passing information which is longer than your screen through a data
165 pager, such as the common Unix less and more programs. In order to be able to
166 see this information in color, your pager needs to be properly configured. I
167 strongly recommend using less instead of more, as it seems that more simply can
167 168 not understand colored text correctly.
168 169
169 170 In order to configure less as your default pager, do the following:
170 171
171 172 1. Set the environment PAGER variable to less.
172 173 2. Set the environment LESS variable to -r (plus any other options
173 174 you always want to pass to less by default). This tells less to
174 175 properly interpret control sequences, which is how color
175 176 information is given to your terminal.
176 177
177 178 For the csh or tcsh shells, add to your ~/.cshrc file the lines::
178 179
179 180 setenv PAGER less
180 181 setenv LESS -r
181 182
182 183 There is similar syntax for other Unix shells, look at your system
183 184 documentation for details.
184 185
185 186 If you are on a system which lacks proper data pagers (such as Windows),
186 187 IPython will use a very limited builtin pager.
187 188
188 189 .. _emacs:
189 190
190 191 (X)Emacs configuration
191 192 ======================
192 193
193 194 Thanks to the work of Alexander Schmolck and Prabhu Ramachandran,
194 195 currently (X)Emacs and IPython get along very well.
195 196
196 197 Important note: You will need to use a recent enough version of
197 198 python-mode.el, along with the file ipython.el. You can check that the
198 199 version you have of python-mode.el is new enough by either looking at
199 200 the revision number in the file itself, or asking for it in (X)Emacs via
200 201 M-x py-version. Versions 4.68 and newer contain the necessary fixes for
201 202 proper IPython support.
202 203
203 204 The file ipython.el is included with the IPython distribution, in the
204 205 documentation directory (where this manual resides in PDF and HTML
205 206 formats).
206 207
207 208 Once you put these files in your Emacs path, all you need in your .emacs
208 209 file is::
209 210
210 211 (require 'ipython)
211 212
212 213 This should give you full support for executing code snippets via
213 214 IPython, opening IPython as your Python shell via ``C-c !``, etc.
214 215
215 216 You can customize the arguments passed to the IPython instance at startup by
216 217 setting the ``py-python-command-args`` variable. For example, to start always
217 218 in ``pylab`` mode with hardcoded light-background colors, you can use::
218 219
219 220 (setq py-python-command-args '("-pylab" "-colors" "LightBG"))
220 221
221 222 If you happen to get garbage instead of colored prompts as described in
222 223 the previous section, you may need to set also in your .emacs file::
223 224
224 225 (setq ansi-color-for-comint-mode t)
225 226
226 227 Notes:
227 228
228 229 * There is one caveat you should be aware of: you must start the
229 230 IPython shell before attempting to execute any code regions via
230 231 ``C-c |``. Simply type C-c ! to start IPython before passing any code
231 232 regions to the interpreter, and you shouldn't experience any
232 233 problems.
233 234 This is due to a bug in Python itself, which has been fixed for
234 235 Python 2.3, but exists as of Python 2.2.2 (reported as SF bug [
235 236 737947 ]).
236 237 * The (X)Emacs support is maintained by Alexander Schmolck, so all
237 238 comments/requests should be directed to him through the IPython
238 239 mailing lists.
239 240 * This code is still somewhat experimental so it's a bit rough
240 241 around the edges (although in practice, it works quite well).
241 242 * Be aware that if you customize py-python-command previously, this
242 243 value will override what ipython.el does (because loading the
243 244 customization variables comes later).
@@ -1,139 +1,207
1 1 .. _credits:
2 2
3 3 =======
4 4 Credits
5 5 =======
6 6
7 IPython is mainly developed by Fernando Pérez
8 <Fernando.Perez@colorado.edu>, but the project was born from mixing in
9 Fernando's code with the IPP project by Janko Hauser
10 <jhauser-AT-zscout.de> and LazyPython by Nathan Gray
11 <n8gray-AT-caltech.edu>. For all IPython-related requests, please
12 contact Fernando.
13
14 As of early 2006, the following developers have joined the core team:
15
16 * [Robert Kern] <rkern-AT-enthought.com>: co-mentored the 2005
17 Google Summer of Code project to develop python interactive
18 notebooks (XML documents) and graphical interface. This project
19 was awarded to the students Tzanko Matev <tsanko-AT-gmail.com> and
20 Toni Alatalo <antont-AT-an.org>
21 * [Brian Granger] <bgranger-AT-scu.edu>: extending IPython to allow
22 support for interactive parallel computing.
23 * [Ville Vainio] <vivainio-AT-gmail.com>: Ville is the new
24 maintainer for the main trunk of IPython after version 0.7.1.
7 IPython is led by Fernando Pérez.
8
9 As of this writing, the following developers have joined the core team:
10
11 * [Robert Kern] <rkern-AT-enthought.com>: co-mentored the 2005
12 Google Summer of Code project to develop python interactive
13 notebooks (XML documents) and graphical interface. This project
14 was awarded to the students Tzanko Matev <tsanko-AT-gmail.com> and
15 Toni Alatalo <antont-AT-an.org>.
16
17 * [Brian Granger] <ellisonbg-AT-gmail.com>: extending IPython to allow
18 support for interactive parallel computing.
19
20 * [Benjamin (Min) Ragan-Kelley]: key work on IPython's parallel
21 computing infrastructure.
22
23 * [Ville Vainio] <vivainio-AT-gmail.com>: Ville has made many improvements
24 to the core of IPython and was the maintainer of the main IPython
25 trunk from version 0.7.1 to 0.8.4.
26
27 * [Gael Varoquaux] <gael.varoquaux-AT-normalesup.org>: work on the merged
28 architecture for the interpreter as of version 0.9, implementing a new WX GUI
29 based on this system.
30
31 * [Barry Wark] <barrywark-AT-gmail.com>: implementing a new Cocoa GUI, as well
32 as work on the new interpreter architecture and Twisted support.
33
34 * [Laurent Dufrechou] <laurent.dufrechou-AT-gmail.com>: development of the WX
35 GUI support.
36
37 * [Jörgen Stenarson] <jorgen.stenarson-AT-bostream.nu>: maintainer of the
38   PyReadline project, necessary for IPython under Windows.
39
25 40
26 41 The IPython project is also very grateful to:
27 42
28 43 Bill Bumgarner <bbum-AT-friday.com>: for providing the DPyGetOpt module
29 44 which gives very powerful and convenient handling of command-line
30 45 options (light years ahead of what Python 2.1.1's getopt module does).
31 46
32 47 Ka-Ping Yee <ping-AT-lfw.org>: for providing the Itpl module for
33 48 convenient and powerful string interpolation with a much nicer syntax
34 49 than formatting through the '%' operator.
35 50
36 51 Arnd Baecker <baecker-AT-physik.tu-dresden.de>: for his many very useful
37 52 suggestions and comments, and lots of help with testing and
38 53 documentation checking. Many of IPython's newer features are a result of
39 54 discussions with him (bugs are still my fault, not his).
40 55
41 56 Obviously Guido van Rossum and the whole Python development team, that
42 57 goes without saying.
43 58
44 59 IPython's website is generously hosted at http://ipython.scipy.org by
45 60 Enthought (http://www.enthought.com). I am very grateful to them and all
46 61 of the SciPy team for their contribution.
47 62
48 63 Fernando would also like to thank Stephen Figgins <fig-AT-monitor.net>,
49 64 an O'Reilly Python editor. His Oct/11/2001 article about IPP and
50 65 LazyPython was what got this project started. You can read it at:
51 66 http://www.onlamp.com/pub/a/python/2001/10/11/pythonnews.html.
52 67
53 And last but not least, all the kind IPython users who have emailed new
54 code, bug reports, fixes, comments and ideas. A brief list follows,
55 please let me know if I have omitted your name by accident:
56
57 * [Jack Moffit] <jack-AT-xiph.org> Bug fixes, including the infamous
58 color problem. This bug alone caused many lost hours and
59 frustration, many thanks to him for the fix. I've always been a
60 fan of Ogg & friends, now I have one more reason to like these folks.
61 Jack is also contributing with Debian packaging and many other
62 things.
63 * [Alexander Schmolck] <a.schmolck-AT-gmx.net> Emacs work, bug
64 reports, bug fixes, ideas, lots more. The ipython.el mode for
65 (X)Emacs is Alex's code, providing full support for IPython under
66 (X)Emacs.
67 * [Andrea Riciputi] <andrea.riciputi-AT-libero.it> Mac OSX
68 information, Fink package management.
69 * [Gary Bishop] <gb-AT-cs.unc.edu> Bug reports, and patches to work
70   around the exception handling idiosyncrasies of WxPython. Readline
71 and color support for Windows.
72 * [Jeffrey Collins] <Jeff.Collins-AT-vexcel.com> Bug reports. Much
73 improved readline support, including fixes for Python 2.3.
74 * [Dryice Liu] <dryice-AT-liu.com.cn> FreeBSD port.
75 * [Mike Heeter] <korora-AT-SDF.LONESTAR.ORG>
76 * [Christopher Hart] <hart-AT-caltech.edu> PDB integration.
77 * [Milan Zamazal] <pdm-AT-zamazal.org> Emacs info.
78 * [Philip Hisley] <compsys-AT-starpower.net>
79 * [Holger Krekel] <pyth-AT-devel.trillke.net> Tab completion, lots
80 more.
81 * [Robin Siebler] <robinsiebler-AT-starband.net>
82 * [Ralf Ahlbrink] <ralf_ahlbrink-AT-web.de>
83 * [Thorsten Kampe] <thorsten-AT-thorstenkampe.de>
84 * [Fredrik Kant] <fredrik.kant-AT-front.com> Windows setup.
85 * [Syver Enstad] <syver-en-AT-online.no> Windows setup.
86 * [Richard] <rxe-AT-renre-europe.com> Global embedding.
87 * [Hayden Callow] <h.callow-AT-elec.canterbury.ac.nz> Gnuplot.py 1.6
88 compatibility.
89 * [Leonardo Santagada] <retype-AT-terra.com.br> Fixes for Windows
90 installation.
91 * [Christopher Armstrong] <radix-AT-twistedmatrix.com> Bugfixes.
92 * [Francois Pinard] <pinard-AT-iro.umontreal.ca> Code and
93 documentation fixes.
94 * [Cory Dodt] <cdodt-AT-fcoe.k12.ca.us> Bug reports and Windows
95 ideas. Patches for Windows installer.
96 * [Olivier Aubert] <oaubert-AT-bat710.univ-lyon1.fr> New magics.
97 * [King C. Shu] <kingshu-AT-myrealbox.com> Autoindent patch.
98 * [Chris Drexler] <chris-AT-ac-drexler.de> Readline packages for
99 Win32/CygWin.
100 * [Gustavo Cordova Avila] <gcordova-AT-sismex.com> EvalDict code for
101 nice, lightweight string interpolation.
102 * [Kasper Souren] <Kasper.Souren-AT-ircam.fr> Bug reports, ideas.
103 * [Gever Tulley] <gever-AT-helium.com> Code contributions.
104 * [Ralf Schmitt] <ralf-AT-brainbot.com> Bug reports & fixes.
105 * [Oliver Sander] <osander-AT-gmx.de> Bug reports.
106 * [Rod Holland] <rhh-AT-structurelabs.com> Bug reports and fixes to
107 logging module.
108 * [Daniel 'Dang' Griffith] <pythondev-dang-AT-lazytwinacres.net>
109 Fixes, enhancement suggestions for system shell use.
110 * [Viktor Ransmayr] <viktor.ransmayr-AT-t-online.de> Tests and
111 reports on Windows installation issues. Contributed a true Windows
112 binary installer.
113 * [Mike Salib] <msalib-AT-mit.edu> Help fixing a subtle bug related
114 to traceback printing.
115 * [W.J. van der Laan] <gnufnork-AT-hetdigitalegat.nl> Bash-like
116 prompt specials.
117 * [Antoon Pardon] <Antoon.Pardon-AT-rece.vub.ac.be> Critical fix for
118 the multithreaded IPython.
119 * [John Hunter] <jdhunter-AT-nitace.bsd.uchicago.edu> Matplotlib
120 author, helped with all the development of support for matplotlib
121   in IPython, including making necessary changes to matplotlib itself.
122 * [Matthew Arnison] <maffew-AT-cat.org.au> Bug reports, '%run -d' idea.
123 * [Prabhu Ramachandran] <prabhu_r-AT-users.sourceforge.net> Help
124 with (X)Emacs support, threading patches, ideas...
125 * [Norbert Tretkowski] <tretkowski-AT-inittab.de> help with Debian
126 packaging and distribution.
127 * [George Sakkis] <gsakkis-AT-eden.rutgers.edu> New matcher for
128 tab-completing named arguments of user-defined functions.
129 * [Jörgen Stenarson] <jorgen.stenarson-AT-bostream.nu> Wildcard
130 support implementation for searching namespaces.
131 * [Vivian De Smedt] <vivian-AT-vdesmedt.com> Debugger enhancements,
132 so that when pdb is activated from within IPython, coloring, tab
133 completion and other features continue to work seamlessly.
134 * [Scott Tsai] <scottt958-AT-yahoo.com.tw> Support for automatic
135 editor invocation on syntax errors (see
136 http://www.scipy.net/roundup/ipython/issue36).
137 * [Alexander Belchenko] <bialix-AT-ukr.net> Improvements for win32
138 paging system.
139 * [Will Maier] <willmaier-AT-ml1.net> Official OpenBSD port. No newline at end of file
68 And last but not least, all the kind IPython users who have emailed new code,
69 bug reports, fixes, comments and ideas. A brief list follows, please let us
70   know if we have omitted your name by accident:
71
72 * Dan Milstein <danmil-AT-comcast.net>. A bold refactoring of the
73 core prefilter stuff in the IPython interpreter.
74
75 * [Jack Moffit] <jack-AT-xiph.org> Bug fixes, including the infamous
76 color problem. This bug alone caused many lost hours and
77 frustration, many thanks to him for the fix. I've always been a
78 fan of Ogg & friends, now I have one more reason to like these folks.
79 Jack is also contributing with Debian packaging and many other
80 things.
81
82 * [Alexander Schmolck] <a.schmolck-AT-gmx.net> Emacs work, bug
83 reports, bug fixes, ideas, lots more. The ipython.el mode for
84 (X)Emacs is Alex's code, providing full support for IPython under
85 (X)Emacs.
86
87 * [Andrea Riciputi] <andrea.riciputi-AT-libero.it> Mac OSX
88 information, Fink package management.
89
90 * [Gary Bishop] <gb-AT-cs.unc.edu> Bug reports, and patches to work
91   around the exception handling idiosyncrasies of WxPython. Readline
92 and color support for Windows.
93
94 * [Jeffrey Collins] <Jeff.Collins-AT-vexcel.com> Bug reports. Much
95 improved readline support, including fixes for Python 2.3.
96
97 * [Dryice Liu] <dryice-AT-liu.com.cn> FreeBSD port.
98
99 * [Mike Heeter] <korora-AT-SDF.LONESTAR.ORG>
100
101 * [Christopher Hart] <hart-AT-caltech.edu> PDB integration.
102
103 * [Milan Zamazal] <pdm-AT-zamazal.org> Emacs info.
104
105 * [Philip Hisley] <compsys-AT-starpower.net>
106
107 * [Holger Krekel] <pyth-AT-devel.trillke.net> Tab completion, lots
108 more.
109
110 * [Robin Siebler] <robinsiebler-AT-starband.net>
111
112 * [Ralf Ahlbrink] <ralf_ahlbrink-AT-web.de>
113
114 * [Thorsten Kampe] <thorsten-AT-thorstenkampe.de>
115
116 * [Fredrik Kant] <fredrik.kant-AT-front.com> Windows setup.
117
118 * [Syver Enstad] <syver-en-AT-online.no> Windows setup.
119
120 * [Richard] <rxe-AT-renre-europe.com> Global embedding.
121
122 * [Hayden Callow] <h.callow-AT-elec.canterbury.ac.nz> Gnuplot.py 1.6
123 compatibility.
124
125 * [Leonardo Santagada] <retype-AT-terra.com.br> Fixes for Windows
126 installation.
127
128 * [Christopher Armstrong] <radix-AT-twistedmatrix.com> Bugfixes.
129
130 * [Francois Pinard] <pinard-AT-iro.umontreal.ca> Code and
131 documentation fixes.
132
133 * [Cory Dodt] <cdodt-AT-fcoe.k12.ca.us> Bug reports and Windows
134 ideas. Patches for Windows installer.
135
136 * [Olivier Aubert] <oaubert-AT-bat710.univ-lyon1.fr> New magics.
137
138 * [King C. Shu] <kingshu-AT-myrealbox.com> Autoindent patch.
139
140 * [Chris Drexler] <chris-AT-ac-drexler.de> Readline packages for
141 Win32/CygWin.
142
143 * [Gustavo Cordova Avila] <gcordova-AT-sismex.com> EvalDict code for
144 nice, lightweight string interpolation.
145
146 * [Kasper Souren] <Kasper.Souren-AT-ircam.fr> Bug reports, ideas.
147
148 * [Gever Tulley] <gever-AT-helium.com> Code contributions.
149
150 * [Ralf Schmitt] <ralf-AT-brainbot.com> Bug reports & fixes.
151
152 * [Oliver Sander] <osander-AT-gmx.de> Bug reports.
153
154 * [Rod Holland] <rhh-AT-structurelabs.com> Bug reports and fixes to
155 logging module.
156
157 * [Daniel 'Dang' Griffith] <pythondev-dang-AT-lazytwinacres.net>
158 Fixes, enhancement suggestions for system shell use.
159
160 * [Viktor Ransmayr] <viktor.ransmayr-AT-t-online.de> Tests and
161 reports on Windows installation issues. Contributed a true Windows
162 binary installer.
163
164 * [Mike Salib] <msalib-AT-mit.edu> Help fixing a subtle bug related
165 to traceback printing.
166
167 * [W.J. van der Laan] <gnufnork-AT-hetdigitalegat.nl> Bash-like
168 prompt specials.
169
170 * [Antoon Pardon] <Antoon.Pardon-AT-rece.vub.ac.be> Critical fix for
171 the multithreaded IPython.
172
173 * [John Hunter] <jdhunter-AT-nitace.bsd.uchicago.edu> Matplotlib
174 author, helped with all the development of support for matplotlib
175   in IPython, including making necessary changes to matplotlib itself.
176
177 * [Matthew Arnison] <maffew-AT-cat.org.au> Bug reports, '%run -d' idea.
178
179 * [Prabhu Ramachandran] <prabhu_r-AT-users.sourceforge.net> Help
180 with (X)Emacs support, threading patches, ideas...
181
182 * [Norbert Tretkowski] <tretkowski-AT-inittab.de> help with Debian
183 packaging and distribution.
184
185 * [George Sakkis] <gsakkis-AT-eden.rutgers.edu> New matcher for
186 tab-completing named arguments of user-defined functions.
187
188 * [Jörgen Stenarson] <jorgen.stenarson-AT-bostream.nu> Wildcard
189 support implementation for searching namespaces.
190
191 * [Vivian De Smedt] <vivian-AT-vdesmedt.com> Debugger enhancements,
192 so that when pdb is activated from within IPython, coloring, tab
193 completion and other features continue to work seamlessly.
194
195 * [Scott Tsai] <scottt958-AT-yahoo.com.tw> Support for automatic
196 editor invocation on syntax errors (see
197 http://www.scipy.net/roundup/ipython/issue36).
198
199 * [Alexander Belchenko] <bialix-AT-ukr.net> Improvements for win32
200 paging system.
201
202 * [Will Maier] <willmaier-AT-ml1.net> Official OpenBSD port.
203
204 * [Ondrej Certik] <ondrej-AT-certik.cz>: set up the IPython docs to use the new
205 Sphinx system used by Python, Matplotlib and many more projects.
206
207 * [Stefan van der Walt] <stefan-AT-sun.ac.za>: support for the new config system.
@@ -1,360 +1,310
1 1 .. _development:
2 2
3 ==================================
3 ==============================
4 4 IPython development guidelines
5 ==================================
6
7 .. contents::
5 ==============================
8 6
9 7
10 8 Overview
11 9 ========
12 10
13 IPython is the next generation of IPython. It is named such for two reasons:
14
15 - Eventually, IPython will become IPython version 1.0.
16 - This new code base needs to be able to co-exist with the existing IPython until
17 it is a full replacement for it. Thus we needed a different name. We couldn't
18   use ``ipython`` (lowercase) as some file systems are case insensitive.
19
20 There are two, no three, main goals of the IPython effort:
21
22 1. Clean up the existing codebase and write lots of tests.
23 2. Separate the core functionality of IPython from the terminal to enable IPython
24 to be used from within a variety of GUI applications.
25 3. Implement a system for interactive parallel computing.
26
27 While the third goal may seem a bit unrelated to the main focus of IPython, it turns
28 out that the technologies required for this goal are nearly identical with those
29 required for goal two. This is the main reason the interactive parallel computing
30 capabilities are being put into IPython proper. Currently the third of these goals is
31 furthest along.
32
33 This document describes IPython from the perspective of developers.
11 This document describes IPython from the perspective of developers. Most
12 importantly, it gives information for people who want to contribute to the
13 development of IPython. So if you want to help out, read on!
14
15 How to contribute to IPython
16 ============================
17
18 IPython development is done using Bazaar [Bazaar]_ and Launchpad [Launchpad]_.
19 This makes it easy for people to contribute to the development of IPython.
20 Here is a sketch of how to get going.
21
22 Install Bazaar and create a Launchpad account
23 ---------------------------------------------
34 24
25 First make sure you have installed Bazaar (see their `website
26 <http://bazaar-vcs.org/>`_). To see that Bazaar is installed and knows about
27 you, try the following::
35 28
36 Project organization
37 ====================
38
39 Subpackages
40 -----------
29 $ bzr whoami
30 Joe Coder <jcoder@gmail.com>
41 31
42 IPython is organized into semi self-contained subpackages. Each of the subpackages will have its own:
43
44 - **Dependencies**. One of the most important things to keep in mind in
45 partitioning code amongst subpackages, is that they should be used to cleanly
46 encapsulate dependencies.
47 - **Tests**. Each subpackage should have its own ``tests`` subdirectory that
48 contains all of the tests for that package. For information about writing tests
49 for IPython, see the `Testing System`_ section of this document.
50 - **Configuration**. Each subpackage should have its own ``config`` subdirectory
51 that contains the configuration information for the components of the
52 subpackage. For information about how the IPython configuration system
53 works, see the `Configuration System`_ section of this document.
54 - **Scripts**. Each subpackage should have its own ``scripts`` subdirectory that
55 contains all of the command line scripts associated with the subpackage.
32 This should display your name and email. Next, you will want to create an
33 account on the `Launchpad website <http://www.launchpad.net>`_ and set up your
34 ssh keys. For more information on setting up your ssh keys, see `this link
35 <https://help.launchpad.net/YourAccount/CreatingAnSSHKeyPair>`_.
56 36
57 Installation and dependencies
58 -----------------------------
59
60 IPython will not use `setuptools`_ for installation. Instead, we will use standard
61 ``setup.py`` scripts that use `distutils`_. While there are a number of extremely nice
62 features that `setuptools`_ has (like namespace packages), the current implementation
63 of `setuptools`_ has performance problems, particularly on shared file systems. In
64 particular, when Python packages are installed on NFS file systems, import times
65 become much too long (up towards 10 seconds).
37 Get the main IPython branch from Launchpad
38 ------------------------------------------
66 39
67 Because IPython is being used extensively in the context of high performance
68 computing, where performance is critical but shared file systems are common, we feel
69 these performance hits are not acceptable. Thus, until the performance problems
70 associated with `setuptools`_ are addressed, we will stick with plain `distutils`_. We
71 are hopeful that these problems will be addressed and that we will eventually begin
72 using `setuptools`_. Because of this, we are trying to organize IPython in a way that
73 will make the eventual transition to `setuptools`_ as painless as possible.
40 Now, you can get a copy of the main IPython development branch (we call this
41 the "trunk")::
74 42
75 Because we will be using `distutils`_, there will be no method for automatically installing dependencies. Instead, we are following the approach of `Matplotlib`_ which can be summarized as follows:
43 $ bzr branch lp:ipython
76 44
77 - Distinguish between required and optional dependencies. However, the required
78 dependencies for IPython should be only the Python standard library.
79 - Upon installation check to see which optional dependencies are present and tell
80 the user which parts of IPython need which optional dependencies.
45 Create a working branch
46 -----------------------
81 47
82 It is absolutely critical that each subpackage of IPython has a clearly specified set
83 of dependencies and that dependencies are not carelessly inherited from other IPython
84 subpackages. Furthermore, tests that have certain dependencies should not fail if
85 those dependencies are not present. Instead they should be skipped and print a
86 message.
48 When working on IPython, you won't actually make edits directly to the
49 :file:`lp:ipython` branch. Instead, you will create a separate branch for your
50 changes. For now, let's assume you want to do your work in a branch named
51 "ipython-mybranch". Create this branch by doing::
87 52
88 .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
89 .. _distutils: http://docs.python.org/lib/module-distutils.html
90 .. _Matplotlib: http://matplotlib.sourceforge.net/
53 $ bzr branch ipython ipython-mybranch
91 54
92 Specific subpackages
93 --------------------
55 When you actually create a branch, you will want to give it a name that
56 reflects the nature of the work that you will be doing in it, like
57 "install-docs-update".
94 58
95 ``core``
96 This is the core functionality of IPython that is independent of the
97 terminal, network and GUIs. Most of the code that is in the current
98 IPython trunk will be refactored, cleaned up and moved here.
59 Make edits in your working branch
60 ---------------------------------
99 61
100 ``kernel``
101 This enables the IPython core to be exposed to the network. This is
102 also where all of the parallel computing capabilities are to be found.
103
104 ``config``
105 The configuration package used by IPython.
62 Now you are ready to actually make edits in your :file:`ipython-mybranch`
63 branch. Before doing this, it is helpful to install this branch so you can
64 test your changes as you work. This is easiest if you have setuptools
65 installed. Then, just do::
106 66
107 ``frontends``
108 The various frontends for IPython. A frontend is the end-user application
109 that exposes the capabilities of IPython to the user. The most basic frontend
110 will simply be a terminal based application that looks just like today's
111 IPython. Other frontends will likely be more powerful and based on GUI toolkits.
112
113 ``notebook``
114 An application that allows users to work with IPython notebooks.
115
116 ``tools``
117 This is where general utilities go.
67 $ cd ipython-mybranch
68 $ python setupegg.py develop
118 69
70 Now, make some changes. After a while, you will want to commit your changes.
71 This lets Bazaar know that you like the changes you have made and gives you
72 an opportunity to keep a nice record of what you have done. This looks like
73 this::
119 74
120 Version control
121 ===============
122
123 In the past, IPython development has been done using `Subversion`__. Recently, we made the transition to using `Bazaar`__ and `Launchpad`__. This makes it much easier for people
124 to contribute code to IPython. Here is a sketch of how to use Bazaar for IPython
125 development. First, you should install Bazaar. After you have done that, make
126 sure that it is working by getting the latest main branch of IPython::
127
128 $ bzr branch lp:ipython
129
130 Now you can create a new branch for you to do your work in::
75 $ ...do work in ipython-mybranch...
76 $ bzr commit -m "the commit message goes here"
131 77
132 $ bzr branch ipython ipython-mybranch
133
134 The typical work cycle in this branch will be to make changes in `ipython-mybranch`
135 and then commit those changes using the commit command::
136
137 $ ...do work in ipython-mybranch...
138 $ bzr ci -m "the commit message goes here"
139
140 Please note that since we now don't use an old-style linear ChangeLog
141 (that tends to cause problems with distributed version control
142 systems), you should ensure that your log messages are reasonably
143 detailed. Use a docstring-like approach in the commit messages
144 (including the second line being left *blank*)::
78 Please note that since we now don't use an old-style linear ChangeLog (that
79 tends to cause problems with distributed version control systems), you should
80 ensure that your log messages are reasonably detailed. Use a docstring-like
81 approach in the commit messages (including the second line being left
82 *blank*)::
145 83
146 84 Single line summary of changes being committed.
147 85
148 - more details when warranted ...
149 - including crediting outside contributors if they sent the
86 * more details when warranted ...
87 * including crediting outside contributors if they sent the
150 88 code/bug/idea!
151 89
152 If we couple this with a policy of making single commits for each
153 reasonably atomic change, the bzr log should give an excellent view of
154 the project, and the `--short` log option becomes a nice summary.
90 As you work, you will repeat this edit/commit cycle many times. If you work on
91 your branch for a long time, you will also want to get the latest changes from
92 the :file:`lp:ipython` branch. This can be done with the following sequence of
93 commands::
155 94
156 While working with this branch, it is a good idea to merge in changes that have been
157 made upstream in the parent branch. This can be done by doing::
95 $ ls
96 ipython
97 ipython-mybranch
98
99 $ cd ipython
100 $ bzr pull
101 $ cd ../ipython-mybranch
102 $ bzr merge ../ipython
103 $ bzr commit -m "Merging changes from trunk"
158 104
159 $ bzr pull
160
161 If this command shows that the branches have diverged, then you should do a merge
162 instead::
105 Along the way, you should also run the IPython test suite. You can do this using the :command:`iptest` command::
163 106
164 $ bzr merge lp:ipython
107 $ cd
108 $ iptest
165 109
166 If you want others to be able to see your branch, you can create an account with
167 launchpad and push the branch to your own workspace::
110 The :command:`iptest` command will also pick up and run any tests you have written.
168 111
169 $ bzr push bzr+ssh://<me>@bazaar.launchpad.net/~<me>/+junk/ipython-mybranch
112 Post your branch and request a code review
113 ------------------------------------------
170 114
171 Finally, once the work in your branch is done, you can merge your changes back into
172 the `ipython` branch by using merge::
115 Once you are done with your edits, you should post your branch on Launchpad so
116 that other IPython developers can review the changes and help you merge your
117 changes into the main development branch. To post your branch on Launchpad,
118 do::
173 119
174 $ cd ipython
175 $ merge ../ipython-mybranch
176 [resolve any conflicts]
177 $ bzr ci -m "Fixing that bug"
178 $ bzr push
120 $ cd ipython-mybranch
121 $ bzr push lp:~yourusername/ipython/ipython-mybranch
179 122
180 But this will require you to have write permissions to the `ipython` branch. If you don't,
181 you can tell one of the IPython devs about your branch and they can do the merge for you.
123 Then, go to the `IPython Launchpad site <http://www.launchpad.net/ipython>`_, and you
124 should see your branch under the "Code" tab. If you click on your branch, you
125 can provide a short description of the branch as well as mark its status. Most
126 importantly, you should click the link that reads "Propose for merging into
127 another branch". What does this do?
182 128
183 More information about Bazaar workflows can be found `here`__.
129 This lets the other IPython developers know that your branch is ready to be
130 reviewed and merged into the main development branch. During this review
131 process, other developers will give you feedback and help you get your code
132 ready to be merged. Here are the types of things we will be looking for:
184 133
185 .. __: http://subversion.tigris.org/
186 .. __: http://bazaar-vcs.org/
187 .. __: http://www.launchpad.net/ipython
188 .. __: http://doc.bazaar-vcs.org/bzr.dev/en/user-guide/index.html
134 * All code is documented.
135 * All code has tests.
136 * The entire IPython test suite passes.
137
138 Once your changes have been reviewed and approved, someone will merge them
139 into the main development branch.
189 140
190 141 Documentation
191 142 =============
192 143
193 144 Standalone documentation
194 145 ------------------------
195 146
196 All standalone documentation should be written in plain text (``.txt``) files using
197 `reStructuredText`_ for markup and formatting. All such documentation should be placed
198 in the top level directory ``docs`` of the IPython source tree. Or, when appropriate,
199 a suitably named subdirectory should be used. The documentation in this location will
200 serve as the main source for IPython documentation and all existing documentation
201 should be converted to this format.
147 All standalone documentation should be written in plain text (``.txt``) files
148 using reStructuredText [reStructuredText]_ for markup and formatting. All such
149 documentation should be placed in directory :file:`docs/source` of the IPython
150 source tree. The documentation in this location will serve as the main source
151 for IPython documentation and all existing documentation should be converted
152 to this format.
202 153
203 In the future, the text files in the ``docs`` directory will be used to generate all
204 forms of documentation for IPython. This includes documentation on the IPython website
205 as well as *pdf* documentation.
154 To build the final documentation, we use Sphinx [Sphinx]_. Once you have Sphinx installed, you can build the html docs yourself by doing::
206 155
207 .. _reStructuredText: http://docutils.sourceforge.net/rst.html
156 $ cd ipython-mybranch/docs
157 $ make html
208 158
209 159 Docstring format
210 160 ----------------
211 161
212 Good docstrings are very important. All new code will use `Epydoc`_ for generating API
213 docs, so we will follow the `Epydoc`_ conventions. More specifically, we will use
214 `reStructuredText`_ for markup and formatting, since it is understood by a wide
215 variety of tools. This means that if in the future we have any reason to change from
216 `Epydoc`_ to something else, we'll have fewer transition pains.
217
218 Details about using `reStructuredText`_ for docstrings can be found `here
162 Good docstrings are very important. All new code should have docstrings that
163 are formatted using reStructuredText for markup and formatting, since it is
164 understood by a wide variety of tools. Details about using reStructuredText
165 for docstrings can be found `here
219 166 <http://epydoc.sourceforge.net/manual-othermarkup.html>`_.
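
As a purely illustrative sketch (the function, parameter and field names below
are invented for this example and are not part of any IPython API), a
reStructuredText docstring using epydoc's consolidated fields might look like
this::

    def apply_with_args(f, *args, **kwargs):
        """Call ``f`` with the given arguments and return its result.

        :Parameters:
          f : callable
            The function to call.

        :Returns:
          Whatever ``f`` returns.
        """
        return f(*args, **kwargs)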
220 167
221 .. _Epydoc: http://epydoc.sourceforge.net/
222
223 168 Additional PEPs of interest regarding documentation of code:
224 169
225 - `Docstring Conventions <http://www.python.org/peps/pep-0257.html>`_
226 - `Docstring Processing System Framework <http://www.python.org/peps/pep-0256.html>`_
227 - `Docutils Design Specification <http://www.python.org/peps/pep-0258.html>`_
170 * `Docstring Conventions <http://www.python.org/peps/pep-0257.html>`_
171 * `Docstring Processing System Framework <http://www.python.org/peps/pep-0256.html>`_
172 * `Docutils Design Specification <http://www.python.org/peps/pep-0258.html>`_
228 173
229 174
230 175 Coding conventions
231 176 ==================
232 177
233 178 General
234 179 -------
235 180
236 In general, we'll try to follow the standard Python style conventions as described here:
181 In general, we'll try to follow the standard Python style conventions as
182 described here:
237 183
238 - `Style Guide for Python Code <http://www.python.org/peps/pep-0008.html>`_
184 * `Style Guide for Python Code <http://www.python.org/peps/pep-0008.html>`_
239 185
240 186
241 187 Other comments:
242 188
243 - In a large file, top level classes and functions should be
189 * In a large file, top level classes and functions should be
244 190 separated by 2-3 lines to make it easier to separate them visually.
245 - Use 4 spaces for indentation.
246 - Keep the ordering of methods the same in classes that have the same
247 methods. This is particularly true for classes that implement
248 similar interfaces and for interfaces that are similar.
191 * Use 4 spaces for indentation.
192 * Keep the ordering of methods the same in classes that have the same
193 methods. This is particularly true for classes that implement an interface.
249 194
250 195 Naming conventions
251 196 ------------------
252 197
253 In terms of naming conventions, we'll follow the guidelines from the `Style Guide for
254 Python Code`_.
198 In terms of naming conventions, we'll follow the guidelines from the `Style
199 Guide for Python Code`_.
255 200
256 201 For all new IPython code (and much existing code is being refactored), we'll use:
257 202
258 - All ``lowercase`` module names.
203 * All ``lowercase`` module names.
259 204
260 - ``CamelCase`` for class names.
205 * ``CamelCase`` for class names.
261 206
262 - ``lowercase_with_underscores`` for methods, functions, variables and attributes.
207 * ``lowercase_with_underscores`` for methods, functions, variables and
208 attributes.
263 209
264 This may be confusing as most of the existing IPython codebase uses a different convention (``lowerCamelCase`` for methods and attributes). Slowly, we will move IPython over to the new
265 convention, providing shadow names for backward compatibility in public interfaces.
210 There are, however, some important exceptions to these rules. In some cases,
211 IPython code will interface with packages (Twisted, Wx, Qt) that use other
212 conventions. At some level this makes it impossible to adhere to our own
213 standards at all times. In particular, when subclassing classes that use other
214 naming conventions, you must follow their naming conventions. To deal with
215 cases like this, we propose the following policy:
266 216
267 There are, however, some important exceptions to these rules. In some cases, IPython
268 code will interface with packages (Twisted, Wx, Qt) that use other conventions. At some level this makes it impossible to adhere to our own standards at all times. In particular, when subclassing classes that use other naming conventions, you must follow their naming conventions. To deal with cases like this, we propose the following policy:
217 * If you are subclassing a class that uses different conventions, use its
218 naming conventions throughout your subclass. Thus, if you are creating a
219 Twisted Protocol class, use Twisted's
220 ``namingSchemeForMethodsAndAttributes.``
269 221
270 - If you are subclassing a class that uses different conventions, use its
271 naming conventions throughout your subclass. Thus, if you are creating a
272 Twisted Protocol class, used Twisted's ``namingSchemeForMethodsAndAttributes.``
222 * All IPython's official interfaces should use our conventions. In some cases
223 this will mean that you need to provide shadow names (first implement
224 ``fooBar`` and then ``foo_bar = fooBar``). We want to avoid this at all
225 costs, but it will probably be necessary at times. But, please use this
226 sparingly!
273 227
274 - All IPython's official interfaces should use our conventions. In some cases
275 this will mean that you need to provide shadow names (first implement ``fooBar``
276 and then ``foo_bar = fooBar``). We want to avoid this at all costs, but it
277 will probably be necessary at times. But, please use this sparingly!
228 Implementation-specific *private* methods will use
229 ``_single_underscore_prefix``. Names with a leading double underscore will
230 *only* be used in special cases, as they make subclassing difficult (such
231 names are not easily seen by child classes).
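
As a small, hypothetical sketch of these conventions (the class and method
names below are invented purely for illustration)::

    class TaskScheduler(object):
        """CamelCase for class names."""

        def __init__(self):
            # A single leading underscore marks an implementation-private attribute.
            self._pending_tasks = []

        def submit_task(self, task):
            """lowercase_with_underscores for methods, functions and attributes."""
            self._pending_tasks.append(task)

        # A shadow name kept only for backward compatibility with an older
        # lowerCamelCase public interface; use this sparingly.
        submitTask = submit_task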
278 232
279 Implementation-specific *private* methods will use ``_single_underscore_prefix``.
280 Names with a leading double underscore will *only* be used in special cases, as they
281 makes subclassing difficult (such names are not easily seen by child classes).
282
283 Occasionally some run-in lowercase names are used, but mostly for very short names or
284 where we are implementing methods very similar to existing ones in a base class (like
285 ``runlines()`` where ``runsource()`` and ``runcode()`` had established precedent).
233 Occasionally some run-in lowercase names are used, but mostly for very short
234 names or where we are implementing methods very similar to existing ones in a
235 base class (like ``runlines()`` where ``runsource()`` and ``runcode()`` had
236 established precedent).
286 237
287 238 The old IPython codebase has a big mix of classes and modules prefixed with an
288 explicit ``IP``. In Python this is mostly unnecessary, redundant and frowned upon, as
289 namespaces offer cleaner prefixing. The only case where this approach is justified is
290 for classes which are expected to be imported into external namespaces and a very
291 generic name (like Shell) is too likely to clash with something else. We'll need to
292 revisit this issue as we clean up and refactor the code, but in general we should
293 remove as many unnecessary ``IP``/``ip`` prefixes as possible. However, if a prefix
294 seems absolutely necessary the more specific ``IPY`` or ``ipy`` are preferred.
239 explicit ``IP``. In Python this is mostly unnecessary, redundant and frowned
240 upon, as namespaces offer cleaner prefixing. The only case where this approach
241 is justified is for classes which are expected to be imported into external
242 namespaces and a very generic name (like Shell) is too likely to clash with
243 something else. We'll need to revisit this issue as we clean up and refactor
244 the code, but in general we should remove as many unnecessary ``IP``/``ip``
245 prefixes as possible. However, if a prefix seems absolutely necessary the more
246 specific ``IPY`` or ``ipy`` are preferred.
295 247
296 248 .. _devel_testing:
297 249
298 250 Testing system
299 251 ==============
300 252
301 It is extremely important that all code contributed to IPython has tests. Tests should
302 be written as unittests, doctests or as entities that the `Nose`_ testing package will
303 find. Regardless of how the tests are written, we will use `Nose`_ for discovering and
304 running the tests. `Nose`_ will be required to run the IPython test suite, but will
305 not be required to simply use IPython.
306
307 .. _Nose: http://code.google.com/p/python-nose/
253 It is extremely important that all code contributed to IPython has tests.
254 Tests should be written as unittests, doctests or as entities that the Nose
255 [Nose]_ testing package will find. Regardless of how the tests are written, we
256 will use Nose for discovering and running the tests. Nose will be required to
257 run the IPython test suite, but will not be required to simply use IPython.
308 258
309 Tests of `Twisted`__ using code should be written by subclassing the ``TestCase`` class
310 that comes with ``twisted.trial.unittest``. When this is done, `Nose`_ will be able to
311 run the tests and the twisted reactor will be handled correctly.
259 Tests of Twisted using code need to follow two additional guidelines:
312 260
313 .. __: http://www.twistedmatrix.com
261 1. Twisted using tests should be written by subclassing the :class:`TestCase`
262 class that comes with :mod:`twisted.trial.unittest`.
314 263
315 Each subpackage in IPython should have its own ``tests`` directory that contains all
316 of the tests for that subpackage. This allows each subpackage to be self-contained. If
317 a subpackage has any dependencies beyond the Python standard library, the tests for
318 that subpackage should be skipped if the dependencies are not found. This is very
319 important so users don't get tests failing simply because they don't have dependencies.
264 2. All :class:`Deferred` instances that are created in the test must be
265 properly chained and the final one *must* be the return value of the test
266 method.
320 267
321 We also need to look into using Nose's ability to tag tests to allow a more modular
322 approach to running tests.
323
324 .. _devel_config:
268 When these two things are done, Nose will be able to run the tests and the
269 twisted reactor will be handled correctly.
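
As a minimal sketch of such a test (the class and method names here are
hypothetical; ``defer.succeed`` simply stands in for the asynchronous API
under test)::

    from twisted.internet import defer
    from twisted.trial import unittest

    class DeferredResultTestCase(unittest.TestCase):

        def test_deferred_result(self):
            # In a real test this Deferred would come from the code under test.
            d = defer.succeed(2 + 2)
            d.addCallback(self.assertEqual, 4)
            # Returning the final Deferred lets trial (and thus Nose) wait on
            # it and manage the reactor correctly.
            return d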
325 270
326 Configuration system
327 ====================
271 Each subpackage in IPython should have its own :file:`tests` directory that
272 contains all of the tests for that subpackage. This allows each subpackage to
273 be self-contained. If a subpackage has any dependencies beyond the Python
274 standard library, the tests for that subpackage should be skipped if the
275 dependencies are not found. This is very important so users don't get tests
276 failing simply because they don't have dependencies.
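
One common pattern for this is sketched below; it assumes a Nose version that
provides :class:`SkipTest` in :mod:`nose.plugins.skip`, and uses Foolscap only
as an example of an optional dependency::

    from nose.plugins.skip import SkipTest

    def test_needs_foolscap():
        try:
            import foolscap
        except ImportError:
            # Report the test as skipped, not failed, when the optional
            # dependency is missing.
            raise SkipTest("foolscap is not installed")
        # ... the real test body would go here ...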
328 277
329 IPython uses `.ini`_ files for configuration purposes. This represents a huge
330 improvement over the configuration system used in IPython. IPython works with these
331 files using the `ConfigObj`_ package, which IPython includes as
332 ``ipython1/external/configobj.py``.
278 To run the IPython test suite, use the :command:`iptest` command that is installed with IPython::
333 279
334 Currently, we are using raw `ConfigObj`_ objects themselves. Each subpackage of IPython
335 should contain a ``config`` subdirectory that contains all of the configuration
336 information for the subpackage. To see how configuration information is defined (along
338 with defaults), look at the examples in ``ipython1/kernel/config`` and
338 ``ipython1/core/config``. Likewise, to see how the configuration information is used,
339 see examples in ``ipython1/kernel/scripts/ipengine.py``.
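
As a generic illustration of the ConfigObj style only (the file name, section
and option names below are invented and do not reflect IPython's actual
configuration schema)::

    from configobj import ConfigObj

    # An .ini-style file is exposed as nested, dict-like sections.
    config = ConfigObj('example_engine.ini')
    host = config['connections']['host']
    port = int(config['connections']['port'])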
340
341 Eventually, we will add a new layer on top of the raw `ConfigObj`_ objects. We are
342 calling this new layer, ``tconfig``, as it will use a `Traits`_-like validation model.
343 We won't actually use `Traits`_, but will implement something similar in pure Python.
344 But, even in this new system, we will still use `ConfigObj`_ and `.ini`_ files
345 underneath the hood. Talk to Fernando if you are interested in working on this part of
346 IPython. The current prototype of ``tconfig`` is located in the IPython sandbox.
347
348 .. _.ini: http://docs.python.org/lib/module-ConfigParser.html
349 .. _ConfigObj: http://www.voidspace.org.uk/python/configobj.html
350 .. _Traits: http://code.enthought.com/traits/
280 $ iptest
351 281
282 This command runs Nose with the proper options and extensions.
352 283
284 .. _devel_config:
353 285
286 Release checklist
287 =================
354 288
289 Most of the release process is automated by the :file:`release` script in the
290 :file:`tools` directory. This is just a handy reminder for the release manager.
355 291
292 #. Run the release script, which makes the tar.gz, eggs and Win32 .exe
293 installer. It posts them to the site and registers the release with PyPI.
356 294
295 #. Update the website with announcements and links to the updated
296 changes.txt in html form. Remember to put a short note both on the news
297 page of the site and on Launchpad.
357 298
299 #. Draft a short release announcement with i) highlights and ii) a link to
300 the html changes.txt.
358 301
302 #. Make sure that the released version of the docs is live on the site.
359 303
304 #. Celebrate!
360 305
306 .. [Bazaar] Bazaar. http://bazaar-vcs.org/
307 .. [Launchpad] Launchpad. http://www.launchpad.net/ipython
308 .. [reStructuredText] reStructuredText. http://docutils.sourceforge.net/rst.html
309 .. [Sphinx] Sphinx. http://sphinx.pocoo.org/
310 .. [Nose] Nose: a discovery based unittest extension. http://code.google.com/p/python-nose/
@@ -1,9 +1,11
1 1 ==================
2 2 Development
3 3 ==================
4 4
5 5 .. toctree::
6 6 :maxdepth: 2
7 7
8 8 development.txt
9 9 roadmap.txt
10 notification_blueprint.txt
11 config_blueprint.txt
@@ -1,47 +1,83
1 .. Notification:
1 .. _notification:
2 2
3 3 ==========================================
4 4 IPython.kernel.core.notification blueprint
5 5 ==========================================
6 6
7 7 Overview
8 8 ========
9 The :mod:`IPython.kernel.core.notification` module will provide a simple implementation of a notification center and support for the observer pattern within the :mod:`IPython.kernel.core`. The main intended use case is to provide notification of Interpreter events to an observing frontend during the execution of a single block of code.
9
10 The :mod:`IPython.kernel.core.notification` module will provide a simple
11 implementation of a notification center and support for the observer pattern
12 within the :mod:`IPython.kernel.core`. The main intended use case is to
13 provide notification of Interpreter events to an observing frontend during the
14 execution of a single block of code.
10 15
11 16 Functional Requirements
12 17 =======================
18
13 19 The notification center must (a rough sketch illustrating these requirements follows the list):
14 * Provide synchronous notification of events to all registered observers.
15 * Provide typed or labeled notification types
16 * Allow observers to register callbacks for individual or all notification types
17 * Allow observers to register callbacks for events from individual or all notifying objects
18 * Notification to the observer consists of the notification type, notifying object and user-supplied extra information [implementation: as keyword parameters to the registered callback]
19 * Perform as O(1) in the case of no registered observers.
20 * Permit out-of-process or cross-network extension.
21
20
21 * Provide synchronous notification of events to all registered observers.
22
23 * Provide typed or labeled notification types.
24
25 * Allow observers to register callbacks for individual or all notification
26 types.
27
28 * Allow observers to register callbacks for events from individual or all
29 notifying objects.
30
31 * Notification to the observer consists of the notification type, notifying
32 object and user-supplied extra information [implementation: as keyword
33 parameters to the registered callback].
34
35 * Perform as O(1) in the case of no registered observers.
36
37 * Permit out-of-process or cross-network extension.
38
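A rough, illustrative sketch of a center meeting these requirements is shown
below. The class and method names are invented for the sketch and are not the
actual :mod:`IPython.kernel.core.notification` API::

    class SketchNotificationCenter(object):
        """Toy synchronous notification center (illustration only)."""

        def __init__(self):
            # Maps (notification_type, notifying_object) -> list of callbacks.
            # None acts as a wildcard for "all types" / "all objects".
            self._observers = {}

        def register_observer(self, callback, ntype=None, sender=None):
            self._observers.setdefault((ntype, sender), []).append(callback)

        def post_notification(self, ntype, sender, **extra):
            if not self._observers:
                # O(1) fast path when nothing is registered.
                return
            for key in ((ntype, sender), (ntype, None), (None, sender), (None, None)):
                for callback in self._observers.get(key, ()):
                    callback(ntype, sender, **extra)
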
22 39 What's not included
23 ==============================================================
40 ===================
41
24 42 As written, the :mod:`IPython.kernel.core.notification` module does not:
25 * Provide out-of-process or network notifications [these should be handled by a separate, Twisted aware module in :mod:`IPython.kernel`].
26 * Provide zope.interface-style interfaces for the notification system [these should also be provided by the :mod:`IPython.kernel` module]
27
43
44 * Provide out-of-process or network notifications (these should be handled by
45 a separate, Twisted aware module in :mod:`IPython.kernel`).
46
47 * Provide zope.interface-style interfaces for the notification system (these
48 should also be provided by the :mod:`IPython.kernel` module).
49
28 50 Use Cases
29 51 =========
52
30 53 The following use cases describe the main intended uses of the notification module and illustrate the main success scenario for each use case:
31 54
32 1. Dwight Schroot is writing a frontend for the IPython project. His frontend is stuck in the stone age and must communicate synchronously with an IPython.kernel.core.Interpreter instance. Because code is executed in blocks by the Interpreter, Dwight's UI freezes every time he executes a long block of code. To keep track of the progress of his long running block, Dwight adds the following code to his frontend's set-up code::
33 from IPython.kernel.core.notification import NotificationCenter
34 center = NotificationCenter.sharedNotificationCenter
35 center.registerObserver(self, type=IPython.kernel.core.Interpreter.STDOUT_NOTIFICATION_TYPE, notifying_object=self.interpreter, callback=self.stdout_notification)
36
37 and elsewhere in his front end::
38 def stdout_notification(self, type, notifying_object, out_string=None):
39 self.writeStdOut(out_string)
40
41 If everything works, the Interpreter will (according to its published API) fire a notification via the :data:`IPython.kernel.core.notification.sharedCenter` of type :const:`STD_OUT_NOTIFICATION_TYPE` before writing anything to stdout [it's up to the Intereter implementation to figure out when to do this]. The notificaiton center will then call the registered callbacks for that event type (in this case, Dwight's frontend's stdout_notification method). Again, according to its API, the Interpreter provides an additional keyword argument when firing the notificaiton of out_string, a copy of the string it will write to stdout.
42
43 Like magic, Dwight's frontend is able to provide output, even during long-running calculations. Now if Jim could just convince Dwight to use Twisted...
44
45 2. Boss Hog is writing a frontend for the IPython project. Because Boss Hog is stuck in the stone age, his frontend will be written in a new Fortran-like dialect of python and will run only from the command line. Because he doesn't need any fancy notification system and is used to worrying about every cycle on his rat-wheel powered mini, Boss Hog is adamant that the new notification system not produce any performance penalty. As they say in Hazard county, there's no such thing as a free lunch. If he wanted zero overhead, he should have kept using IPython 0.8. Instead, those tricky Duke boys slide in a suped-up bridge-out jumpin' awkwardly confederate-lovin' notification module that imparts only a constant (and small) performance penalty when the Interpreter (or any other object) fires an event for which there are no registered observers. Of course, the same notificaiton-enabled Interpreter can then be used in frontends that require notifications, thus saving the IPython project from a nasty civil war.
46
47 3. Barry is wrting a frontend for the IPython project. Because Barry's front end is the *new hotness*, it uses an asynchronous event model to communicate with a Twisted :mod:`~IPython.kernel.engineservice` that communicates with the IPython :class:`~IPython.kernel.core.interpreter.Interpreter`. Using the :mod:`IPython.kernel.notification` module, an asynchronous wrapper on the :mod:`IPython.kernel.core.notification` module, Barry's frontend can register for notifications from the interpreter that are delivered asynchronously. Even if Barry's frontend is running on a separate process or even host from the Interpreter, the notifications are delivered, as if by dark and twisted magic. Just like Dwight's frontend, Barry's frontend can now recieve notifications of e.g. writing to stdout/stderr, opening/closing an external file, an exception in the executing code, etc. No newline at end of file
55 1. Dwight Schroot is writing a frontend for the IPython project. His frontend is stuck in the stone age and must communicate synchronously with an IPython.kernel.core.Interpreter instance. Because code is executed in blocks by the Interpreter, Dwight's UI freezes every time he executes a long block of code. To keep track of the progress of his long running block, Dwight adds the following code to his frontend's set-up code::
56
57 from IPython.kernel.core.notification import NotificationCenter
58 center = NotificationCenter.sharedNotificationCenter
59 center.registerObserver(self, type=IPython.kernel.core.Interpreter.STDOUT_NOTIFICATION_TYPE, notifying_object=self.interpreter, callback=self.stdout_notification)
60
61 and elsewhere in his front end::
62
63 def stdout_notification(self, type, notifying_object, out_string=None):
64 self.writeStdOut(out_string)
65
66 If everything works, the Interpreter will (according to its published API)
67 fire a notification via the
68 :data:`IPython.kernel.core.notification.sharedCenter` of type
69 :const:`STD_OUT_NOTIFICATION_TYPE` before writing anything to stdout [it's up
70 to the Interpreter implementation to figure out when to do this]. The
71 notification center will then call the registered callbacks for that event
72 type (in this case, Dwight's frontend's stdout_notification method). Again,
73 according to its API, the Interpreter provides an additional keyword argument
74 when firing the notification: out_string, a copy of the string it will write
75 to stdout.
76
77 Like magic, Dwight's frontend is able to provide output, even during
78 long-running calculations. Now if Jim could just convince Dwight to use
79 Twisted...
80
81 2. Boss Hog is writing a frontend for the IPython project. Because Boss Hog is stuck in the stone age, his frontend will be written in a new Fortran-like dialect of Python and will run only from the command line. Because he doesn't need any fancy notification system and is used to worrying about every cycle on his rat-wheel powered mini, Boss Hog is adamant that the new notification system not produce any performance penalty. As they say in Hazard county, there's no such thing as a free lunch. If he wanted zero overhead, he should have kept using IPython 0.8. Instead, those tricky Duke boys slide in a souped-up bridge-out jumpin' awkwardly confederate-lovin' notification module that imparts only a constant (and small) performance penalty when the Interpreter (or any other object) fires an event for which there are no registered observers. Of course, the same notification-enabled Interpreter can then be used in frontends that require notifications, thus saving the IPython project from a nasty civil war.
82
83 3. Barry is writing a frontend for the IPython project. Because Barry's front end is the *new hotness*, it uses an asynchronous event model to communicate with a Twisted :mod:`~IPython.kernel.engineservice` that communicates with the IPython :class:`~IPython.kernel.core.interpreter.Interpreter`. Using the :mod:`IPython.kernel.notification` module, an asynchronous wrapper on the :mod:`IPython.kernel.core.notification` module, Barry's frontend can register for notifications from the interpreter that are delivered asynchronously. Even if Barry's frontend is running on a separate process or even host from the Interpreter, the notifications are delivered, as if by dark and twisted magic. Just like Dwight's frontend, Barry's frontend can now receive notifications of e.g. writing to stdout/stderr, opening/closing an external file, an exception in the executing code, etc. No newline at end of file
@@ -1,96 +1,81
1 1 .. _roadmap:
2 2
3 3 ===================
4 4 Development roadmap
5 5 ===================
6 6
7 .. contents::
8
9 7 IPython is an ambitious project that is still under heavy development. However, we want IPython to become useful to as many people as possible, as quickly as possible. To help us accomplish this, we are laying out a roadmap of where we are headed and what needs to happen to get there. Hopefully, this will help the IPython developers figure out the best things to work on for each upcoming release.
10 8
11 Speaking of releases, we are going to begin releasing a new version of IPython every four weeks. We are hoping that a regular release schedule, along with a clear roadmap of where we are headed will propel the project forward.
12
13 Where are we headed
14 ===================
9 Work targeted to particular releases
10 ====================================
15 11
16 Our goal with IPython is simple: to provide a *powerful*, *robust* and *easy to use* framework for parallel computing. While there are other secondary goals you will hear us talking about at various times, this is the primary goal of IPython that frames the roadmap.
12 Release 0.10
13 ------------
17 14
18 Steps along the way
19 ===================
15 * Initial refactor of :command:`ipcluster`.
20 16
21 Here we describe the various things that we need to work on to accomplish this goal.
17 * Better TextMate integration.
22 18
23 Setting up for regular release schedule
24 ---------------------------------------
19 * Merge in the daemon branch.
25 20
26 We would like to begin to release IPython regularly (probably a 4 week release cycle). To get ready for this, we need to revisit the development guidelines and put in information about releasing IPython.
21 Release 0.11
22 ------------
27 23
28 Process startup and management
29 ------------------------------
24 * Refactor the configuration system and command line options for
25 :command:`ipengine` and :command:`ipcontroller`. This will include the
26 creation of cluster directories that encapsulate all the configuration
27 files, log files and security related files for a particular cluster.
30 28
31 IPython is implemented using a distributed set of processes that communicate using TCP/IP network channels. Currently, users have to start each of the various processes separately using command line scripts. This is both difficult and error prone. Furthermore, there are a number of things that often need to be managed once the processes have been started, such as the sending of signals and the shutting down and cleaning up of processes.
29 * Refactor :command:`ipcluster` to support the new configuration system.
32 30
33 We need to build a system that makes it trivial for users to start and manage IPython processes. This system should have the following properties:
31 * Refactor the daemon stuff to support the new configuration system.
34 32
35 * It should be possible to do everything through an extremely simple API that users
36 can call from their own Python script. No shell commands should be needed.
37 * This simple API should be configured using standard .ini files.
38 * The system should make it possible to start processes using a number of different
39 approaches: SSH, PBS/Torque, Xgrid, Windows Server, mpirun, etc.
40 * The controller and engine processes should each have a daemon for monitoring,
41 signaling and clean up.
42 * The system should be secure.
43 * The system should work under all the major operating systems, including
44 Windows.
33 * Merge back in the core of the notebook.
45 34
46 Initial work has begun on the daemon infrastructure, and some of the needed logic is contained in the ipcluster script.
35 Release 0.12
36 ------------
47 37
48 Ease of use/high-level approaches to parallelism
49 ------------------------------------------------
38 * Fully integrate process startup with the daemons for full process
39 management.
50 40
51 While our current API for clients is well designed, we can still do a lot better in designing a user-facing API that is super simple. The main goal here is that it should take *almost no extra code* for users to get their code running in parallel. For this to be possible, we need to tie into Python's standard idioms that enable efficient coding. The biggest ones we are looking at are using context managers (i.e., Python 2.5's ``with`` statement) and decorators. Initial work on this front has begun, but more work is needed.
41 * Make the capabilities of :command:`ipcluster` available from simple Python
42 classes.
52 43
53 We also need to think about new models for expressing parallelism. This is fun work as most of the foundation has already been established.
44 Major areas of work
45 ===================
54 46
55 Security
56 --------
47 Refactoring the main IPython core
48 ---------------------------------
57 49
58 Currently, IPython has no built in security or security model. Because we would like IPython to be usable on public computer systems and over wide area networks, we need to come up with a robust solution for security. Here are some of the specific things that need to be included:
50 Process management for :mod:`IPython.kernel`
51 --------------------------------------------
59 52
60 * User authentication between all processes (engines, controller and clients).
61 * Optional TSL/SSL based encryption of all communication channels.
62 * A good way of picking network ports so multiple users on the same system can
63 run their own controller and engines without interfering with those of others.
64 * A clear model for security that enables users to evaluate the security risks
65 associated with using IPython in various manners.
53 Configuration system
54 --------------------
66 55
67 For the implementation of this, we plan on using Twisted's support for SSL and authentication. One thing that we really should look at is the `Foolscap`_ network protocol, which provides many of these things out of the box.
56 Performance problems
57 --------------------
68 58
69 .. _Foolscap: http://foolscap.lothar.com/trac
59 Currently, we have a number of performance issues that are waiting to bite users:
70 60
71 The security work needs to be done in conjunction with other network protocol stuff.
61 * The controller stores a large amount of state in Python dictionaries. Under
62 heavy usage, these dicts will get very large, causing memory usage problems.
63 We need to develop more scalable solutions to this problem, such as using a
64 sqlite database to store this state. This will also help the controller to
65 be more fault tolerant.
72 66
73 Latent performance issues
74 -------------------------
67 * We currently don't have a good way of handling large objects in the
68 controller. The biggest problem is that because we don't have any way of
69 streaming objects, we get lots of temporary copies in the low-level buffers.
70 We need to implement a better serialization approach and true streaming
71 support.
75 72
76 Currently, we have a number of performance issues that are waiting to bite users:
73 * The controller currently unpickles and repickles objects. We need to use the
74 [push|pull]_serialized methods instead.
77 75
78 * The controller store a large amount of state in Python dictionaries. Under heavy
79 usage, these dicts with get very large, causing memory usage problems. We need to
80 develop more scalable solutions to this problem, such as using a sqlite database
81 to store this state. This will also help the controller to be more fault tolerant.
82 * Currently, the client to controller connections are done through XML-RPC using
83 HTTP 1.0. This is very inefficient as XML-RPC is a very verbose protocol and
84 each request must be handled with a new connection. We need to move these network
85 connections over to PB or Foolscap.
86 * We currently don't have a good way of handling large objects in the controller.
87 The biggest problem is that because we don't have any way of streaming objects,
88 we get lots of temporary copies in the low-level buffers. We need to implement
89 a better serialization approach and true streaming support.
90 * The controller currently unpickles and repickles objects. We need to use the
91 [push|pull]_serialized methods instead.
92 * Currently the controller is a bottleneck. We need the ability to scale the
93 controller by aggregating multiple controllers into one effective controller.
76 * Currently the controller is a bottleneck. The best approach for this is to
77 separate the controller itself into multiple processes, one for the core
78 controller and one each for the controller interfaces.
94 79
95 80
96 81
@@ -1,93 +1,105
1 1 .. _faq:
2 2
3 3 ========================================
4 4 Frequently asked questions
5 5 ========================================
6 6
7 7 General questions
8 8 =================
9 9
10 10 Questions about parallel computing with IPython
11 11 ================================================
12 12
13 13 Will IPython speed my Python code up?
14 14 --------------------------------------
15 15
16 16 Yes and no. When converting a serial code to run in parallel, there are often many
17 17 difficult questions that need to be answered, such as:
18 18
19 * How should data be decomposed onto the set of processors?
20 * What are the data movement patterns?
21 * Can the algorithm be structured to minimize data movement?
22 * Is dynamic load balancing important?
19 * How should data be decomposed onto the set of processors?
20
21 * What are the data movement patterns?
22
23 * Can the algorithm be structured to minimize data movement?
24
25 * Is dynamic load balancing important?
23 26
24 27 We can't answer such questions for you. This is the hard (but fun) work of parallel
25 28 computing. But, once you understand these things IPython will make it easier for you to
26 29 implement a good solution quickly. Most importantly, you will be able to use the
27 30 resulting parallel code interactively.
28 31
29 32 With that said, if your problem is trivial to parallelize, IPython has a number of
30 33 different interfaces that will enable you to parallelize things in almost no time at
31 all. A good place to start is the ``map`` method of our `multiengine interface`_.
32
33 .. _multiengine interface: ./parallel_multiengine
34 all. A good place to start is the ``map`` method of our :class:`MultiEngineClient`.
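
A rough sketch of what this can look like is below; it assumes a controller
and engines are already running (for example via :command:`ipcluster`) and
that the client is importable as shown::

    from IPython.kernel import client

    mec = client.MultiEngineClient()

    # map distributes the function calls across the running engines and
    # gathers the results back into a single list.
    squares = mec.map(lambda x: x ** 2, range(16))
    print squares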
34 35
35 36 What is the best way to use MPI from Python?
36 37 --------------------------------------------
37 38
38 39 What about all the other parallel computing packages in Python?
39 40 ---------------------------------------------------------------
40 41
41 42 Some of the unique characteristics of IPython are:
42 43
43 * IPython is the only architecture that abstracts out the notion of a
44 parallel computation in such a way that new models of parallel computing
45 can be explored quickly and easily. If you don't like the models we
46 provide, you can simply create your own using the capabilities we provide.
47 * IPython is asynchronous from the ground up (we use `Twisted`_).
48 * IPython's architecture is designed to avoid subtle problems
49 that emerge because of Python's global interpreter lock (GIL).
50 * While IPython'1 architecture is designed to support a wide range
51 of novel parallel computing models, it is fully interoperable with
52 traditional MPI applications.
53 * IPython has been used and tested extensively on modern supercomputers.
54 * IPython's networking layers are completely modular. Thus, is
55 straightforward to replace our existing network protocols with
56 high performance alternatives (ones based upon Myranet/Infiniband).
57 * IPython is designed from the ground up to support collaborative
58 parallel computing. This enables multiple users to actively develop
59 and run the *same* parallel computation.
60 * Interactivity is a central goal for us. While IPython does not have
61 to be used interactivly, is can be.
62
44 * IPython is the only architecture that abstracts out the notion of a
45 parallel computation in such a way that new models of parallel computing
46 can be explored quickly and easily. If you don't like the models we
47 provide, you can simply create your own using the capabilities we provide.
48
49 * IPython is asynchronous from the ground up (we use `Twisted`_).
50
51 * IPython's architecture is designed to avoid subtle problems
52 that emerge because of Python's global interpreter lock (GIL).
53
54 * While IPython's architecture is designed to support a wide range
55 of novel parallel computing models, it is fully interoperable with
56 traditional MPI applications.
57
58 * IPython has been used and tested extensively on modern supercomputers.
59
60 * IPython's networking layers are completely modular. Thus, it is
61 straightforward to replace our existing network protocols with
62 high performance alternatives (ones based upon Myrinet/Infiniband).
63
64 * IPython is designed from the ground up to support collaborative
65 parallel computing. This enables multiple users to actively develop
66 and run the *same* parallel computation.
67
68 * Interactivity is a central goal for us. While IPython does not have
69 to be used interactively, it can be.
70
63 71 .. _Twisted: http://www.twistedmatrix.com
64 72
65 73 Why is the IPython controller a bottleneck in my parallel calculation?
66 74 -------------------------------------------------------------------
67 75
68 76 A golden rule in parallel computing is that you should only move data around if you
69 77 absolutely need to. The main reason that the controller becomes a bottleneck is that
70 78 too much data is being pushed and pulled to and from the engines. If your algorithm
71 79 is structured in this way, you really should think about alternative ways of
72 80 handling the data movement. Here are some ideas:
73 81
74 1. Have the engines write data to files on the locals disks of the engines.
75 2. Have the engines write data to files on a file system that is shared by
76 the engines.
77 3. Have the engines write data to a database that is shared by the engines.
78 4. Simply keep data in the persistent memory of the engines and move the
79 computation to the data (rather than the data to the computation).
80 5. See if you can pass data directly between engines using MPI.
82 1. Have the engines write data to files on the local disks of the engines.
83
84 2. Have the engines write data to files on a file system that is shared by
85 the engines.
86
87 3. Have the engines write data to a database that is shared by the engines.
88
89 4. Simply keep data in the persistent memory of the engines and move the
90 computation to the data (rather than the data to the computation).
91
92 5. See if you can pass data directly between engines using MPI.
81 93
82 94 Isn't Python slow to be used for high-performance parallel computing?
83 95 ---------------------------------------------------------------------
84 96
85 97
86 98
87 99
88 100
89 101
90 102
91 103
92 104
93 105
@@ -1,56 +1,38
1 1 .. _history:
2 2
3 3 =======
4 4 History
5 5 =======
6 6
7 7 Origins
8 8 =======
9 9
10 The current IPython system grew out of the following three projects:
11
12 * [ipython] by Fernando Pérez. I was working on adding
13 Mathematica-type prompts and a flexible configuration system
14 (something better than $PYTHONSTARTUP) to the standard Python
15 interactive interpreter.
16 * [IPP] by Janko Hauser. Very well organized, great usability. Had
17 an old help system. IPP was used as the 'container' code into
18 which I added the functionality from ipython and LazyPython.
19 * [LazyPython] by Nathan Gray. Simple but very powerful. The quick
20 syntax (auto parens, auto quotes) and verbose/colored tracebacks
21 were all taken from here.
22
23 When I found out about IPP and LazyPython I tried to join all three
24 into a unified system. I thought this could provide a very nice
25 working environment, both for regular programming and scientific
26 computing: shell-like features, IDL/Matlab numerics, Mathematica-type
27 prompt history and great object introspection and help facilities. I
28 think it worked reasonably well, though it was a lot more work than I
29 had initially planned.
30
31
32 Current status
33 ==============
34
35 The above listed features work, and quite well for the most part. But
36 until a major internal restructuring is done (see below), only bug
37 fixing will be done, no other features will be added (unless very minor
38 and well localized in the cleaner parts of the code).
39
40 IPython consists of some 18000 lines of pure python code, of which
41 roughly two thirds is reasonably clean. The rest is, messy code which
42 needs a massive restructuring before any further major work is done.
43 Even the messy code is fairly well documented though, and most of the
44 problems in the (non-existent) class design are well pointed to by a
45 PyChecker run. So the rewriting work isn't that bad, it will just be
46 time-consuming.
47
48
49 Future
50 ------
51
52 See the separate new_design document for details. Ultimately, I would
53 like to see IPython become part of the standard Python distribution as a
54 'big brother with batteries' to the standard Python interactive
55 interpreter. But that will never happen with the current state of the
56 code, so all contributions are welcome. No newline at end of file
10 IPython was started in 2001 by Fernando Pérez. IPython as we know it
11 today grew out of the following three projects:
12
13 * ipython by Fernando Pérez. I was working on adding
14 Mathematica-type prompts and a flexible configuration system
15 (something better than $PYTHONSTARTUP) to the standard Python
16 interactive interpreter.
17 * IPP by Janko Hauser. Very well organized, great usability. Had
18 an old help system. IPP was used as the 'container' code into
19 which I added the functionality from ipython and LazyPython.
20 * LazyPython by Nathan Gray. Simple but very powerful. The quick
21 syntax (auto parens, auto quotes) and verbose/colored tracebacks
22 were all taken from here.
23
24 Here is how Fernando describes it:
25
26 When I found out about IPP and LazyPython I tried to join all three
27 into a unified system. I thought this could provide a very nice
28 working environment, both for regular programming and scientific
29 computing: shell-like features, IDL/Matlab numerics, Mathematica-type
30 prompt history and great object introspection and help facilities. I
31 think it worked reasonably well, though it was a lot more work than I
32 had initially planned.
33
34 Today and how we got here
35 =========================
36
37 This needs to be filled in.
38
@@ -1,28 +1,30
1 1 =====================
2 2 IPython Documentation
3 3 =====================
4 4
5 Contents
6 ========
5 .. htmlonly::
6
7 :Release: |release|
8 :Date: |today|
9
10 Contents:
7 11
8 12 .. toctree::
9 :maxdepth: 1
13 :maxdepth: 2
10 14
11 15 overview.txt
12 16 install/index.txt
13 17 interactive/index.txt
14 18 parallel/index.txt
15 19 config/index.txt
16 20 changes.txt
17 21 development/index.txt
18 22 faq.txt
19 23 history.txt
20 24 license_and_copyright.txt
21 25 credits.txt
22 26
23 Indices and tables
24 ==================
25
26 * :ref:`genindex`
27 * :ref:`modindex`
28 * :ref:`search` No newline at end of file
27 .. htmlonly::
28 * :ref:`genindex`
29 * :ref:`modindex`
30 * :ref:`search`
@@ -1,11 +1,10
1 1 .. _install_index:
2 2
3 3 ==================
4 4 Installation
5 5 ==================
6 6
7 7 .. toctree::
8 8 :maxdepth: 2
9 9
10 basic.txt
11 advanced.txt
10 install.txt
@@ -1,11 +1,11
1 1 ==================================
2 2 Using IPython for interactive work
3 3 ==================================
4 4
5 5 .. toctree::
6 :maxdepth: 1
6 :maxdepth: 2
7 7
8 8 tutorial.txt
9 9 reference.txt
10 10 shell.txt
11 11 extension_api.txt
@@ -1,3163 +1,3194
1 .. IPython documentation master file, created by sphinx-quickstart.py on Mon Mar 24 17:01:34 2008.
2 You can adapt this file completely to your liking, but it should at least
3 contain the root 'toctree' directive.
4
5 1 =================
6 2 IPython reference
7 3 =================
8 4
9 .. contents::
10
11 .. _Command line options:
5 .. _command_line_options:
12 6
13 7 Command-line usage
14 8 ==================
15 9
16 10 You start IPython with the command::
17 11
18 12 $ ipython [options] files
19 13
20 14 If invoked with no options, it executes all the files listed in sequence
21 15 and drops you into the interpreter while still acknowledging any options
22 16 you may have set in your ipythonrc file. This behavior is different from
23 17 standard Python, which when called as python -i will only execute one
24 18 file and ignore your configuration setup.
25 19
26 20 Please note that some of the configuration options are not available at
27 21 the command line, simply because they are not practical here. Look into
28 22 your ipythonrc configuration file for details on those. This file
29 23 typically installed in the $HOME/.ipython directory. For Windows users,
30 24 $HOME resolves to C:\\Documents and Settings\\YourUserName in most
31 25 instances. In the rest of this text, we will refer to this directory as
32 26 IPYTHONDIR.
33 27
34 28 .. _Threading options:
35 29
36 30
37 31 Special Threading Options
38 32 -------------------------
39 33
40 34 The following special options are ONLY valid at the beginning of the
41 35 command line, and not later. This is because they control the
42 36 initialization of IPython itself, before the normal option-handling mechanism
43 37 is active.
44 38
45 39 -gthread, -qthread, -q4thread, -wthread, -pylab:
46 40 Only one of these can be given, and it can only be given as
47 41 the first option passed to IPython (it will have no effect in
48 42 any other position). They provide threading support for the
49 43 GTK, Qt (versions 3 and 4) and wxPython toolkits, and for the
50 44 matplotlib library.
51 45
52 46 With any of the first four options, IPython starts running a
53 47 separate thread for the graphical toolkit's operation, so that
54 48 you can open and control graphical elements from within an
55 49 IPython command line, without blocking. All four provide
56 50 essentially the same functionality, respectively for GTK, Qt3,
57 51 Qt4 and wxWidgets (via their Python interfaces).
58 52
59 53 Note that with -wthread, you can additionally use the
60 54 -wxversion option to request a specific version of wx to be
61 55 used. This requires that you have the wxversion Python module
62 56 installed, which is part of recent wxPython distributions.
63 57
64 58 If -pylab is given, IPython loads special support for the
65 59 matplotlib library (http://matplotlib.sourceforge.net), allowing
66 60 interactive usage of any of its backends as defined in the
67 61 user's ~/.matplotlib/matplotlibrc file. It automatically
68 62 activates GTK, Qt or WX threading for IPython if the choice of
69 63 matplotlib backend requires it. It also modifies the %run
70 64 command to correctly execute (without blocking) any
71 65 matplotlib-based script which calls show() at the end.
72 66
73 67 -tk
74 68 The -g/q/q4/wthread options, and -pylab (if matplotlib is
75 69 configured to use GTK, Qt3, Qt4 or WX), will normally block Tk
76 70 graphical interfaces. This means that when either GTK, Qt or WX
77 71 threading is active, any attempt to open a Tk GUI will result in a
78 72 dead window, and possibly cause the Python interpreter to crash.
79 73 An extra option, -tk, is available to address this issue. It can
80 74 only be given as a second option after any of the above (-gthread,
81 75 -wthread or -pylab).
82 76
83 77 If -tk is given, IPython will try to coordinate Tk threading
84 78 with GTK, Qt or WX. This is however potentially unreliable, and
85 79 you will have to test on your platform and Python configuration to
86 80 determine whether it works for you. Debian users have reported
87 81 success, apparently due to the fact that Debian builds all of Tcl,
88 82 Tk, Tkinter and Python with pthreads support. Under other Linux
89 83 environments (such as Fedora Core 2/3), this option has caused
90 84 random crashes and lockups of the Python interpreter. Under other
91 85 operating systems (Mac OSX and Windows), you'll need to try it to
92 86 find out, since currently no user reports are available.
93 87
94 88 There is unfortunately no way for IPython to determine at run time
95 89 whether -tk will work reliably or not, so you will need to do some
96 90 experiments before relying on it for regular work.
97 91
98 92
99 93
100 94 Regular Options
101 95 ---------------
102 96
103 97 After the above threading options have been given, regular options can
104 98 follow in any order. All options can be abbreviated to their shortest
105 99 non-ambiguous form and are case-sensitive. One or two dashes can be
106 100 used. Some options have an alternate short form, indicated after a ``|``.
107 101
108 102 Most options can also be set from your ipythonrc configuration file. See
109 103 the provided example for more details on what the options do. Options
110 104 given at the command line override the values set in the ipythonrc file.
111 105
112 106 All options with a [no] prepended can be specified in negated form
113 107 (-nooption instead of -option) to turn the feature off.
114 108
115 109 -help print a help message and exit.
116 110
117 111 -pylab
118 112 this can only be given as the first option passed to IPython
119 113 (it will have no effect in any other position). It adds
120 114 special support for the matplotlib library
121 115 (http://matplotlib.sourceforge.net), allowing interactive usage
122 116 of any of its backends as defined in the user's .matplotlibrc
123 117 file. It automatically activates GTK or WX threading for
124 118 IPython if the choice of matplotlib backend requires it. It
125 119 also modifies the %run command to correctly execute (without
126 120 blocking) any matplotlib-based script which calls show() at
127 121 the end. See `Matplotlib support`_ for more details.
128 122
129 123 -autocall <val>
130 124 Make IPython automatically call any callable object even if you
131 125 didn't type explicit parentheses. For example, 'str 43' becomes
132 126 'str(43)' automatically. The value can be '0' to disable the feature,
133 127 '1' for smart autocall, where it is not applied if there are no more
134 128 arguments on the line, and '2' for full autocall, where all callable
135 129 objects are automatically called (even if no arguments are
136 130 present). The default is '1'.
137 131
138 132 -[no]autoindent
139 133 Turn automatic indentation on/off.
140 134
141 135 -[no]automagic
142 136 make magic commands automatic (without needing their first character
143 137 to be %). Type %magic at the IPython prompt for more information.
144 138
145 139 -[no]autoedit_syntax
146 140 When a syntax error occurs after editing a file, automatically
147 141 open the file to the trouble causing line for convenient
148 142 fixing.
149 143
150 144 -[no]banner Print the initial information banner (default on).
151 145
152 146 -c <command>
153 147 execute the given command string. This is similar to the -c
154 148 option in the normal Python interpreter.
155 149
156 150 -cache_size, cs <n>
157 151 size of the output cache (maximum number of entries to hold in
158 152 memory). The default is 1000; you can change it permanently in your
159 153 config file. Setting it to 0 completely disables the caching system,
160 154 and the minimum value accepted is 20 (if you provide a value less than
161 155 20, it is reset to 0 and a warning is issued). This limit is defined
162 156 because otherwise you'll spend more time re-flushing a too-small cache
163 157 than working.
164 158
165 159 -classic, cl
166 160 Gives IPython a similar feel to the classic Python
167 161 prompt.
168 162
169 163 -colors <scheme>
170 164 Color scheme for prompts and exception reporting. Currently
171 165 implemented: NoColor, Linux and LightBG.
172 166
173 167 -[no]color_info
174 168 IPython can display information about objects via a set of functions,
175 169 and optionally can use colors for this, syntax highlighting source
176 170 code and various other elements. However, because this information is
177 171 passed through a pager (like 'less') and many pagers get confused with
178 172 color codes, this option is off by default. You can test it and turn
179 173 it on permanently in your ipythonrc file if it works for you. As a
180 174 reference, the 'less' pager supplied with Mandrake 8.2 works ok, but
181 175 that in RedHat 7.2 doesn't.
182 176
183 177 Test it and turn it on permanently if it works with your
184 178 system. The magic function %color_info allows you to toggle this
185 179 interactively for testing.
186 180
187 181 -[no]debug
188 182 Show information about the loading process. Very useful to pin down
189 183 problems with your configuration files or to get details about
190 184 session restores.
191 185
192 186 -[no]deep_reload:
193 187 IPython can use the deep_reload module which reloads changes in
194 188 modules recursively (it replaces the reload() function, so you don't
195 189 need to change anything to use it). deep_reload() forces a full
196 190 reload of modules whose code may have changed, which the default
197 191 reload() function does not.
198 192
199 193 When deep_reload is off, IPython will use the normal reload(),
200 194 but deep_reload will still be available as dreload(). This
201 195 feature is off by default [which means that you have both
202 196 normal reload() and dreload()].
203 197
204 198 -editor <name>
205 199 Which editor to use with the %edit command. By default,
206 200 IPython will honor your EDITOR environment variable (if not
207 201 set, vi is the Unix default and notepad the Windows one).
208 202 Since this editor is invoked on the fly by IPython and is
209 203 meant for editing small code snippets, you may want to use a
210 204 small, lightweight editor here (in case your default EDITOR is
211 205 something like Emacs).
212 206
213 207 -ipythondir <name>
214 208 name of your IPython configuration directory IPYTHONDIR. This
215 209 can also be specified through the environment variable
216 210 IPYTHONDIR.
217 211
218 212 -log, l
219 213 generate a log file of all input. The file is named
220 214 ipython_log.py in your current directory (which prevents logs
221 215 from multiple IPython sessions from trampling each other). You
222 216 can use this to later restore a session by loading your
223 217 logfile as a file to be executed with option -logplay (see
224 218 below).
225 219
226 220 -logfile, lf <name> specify the name of your logfile.
227 221
228 222 -logplay, lp <name>
229 223
230 224 you can replay a previous log. For restoring a session as close as
231 225 possible to the state you left it in, use this option (don't just run
232 226 the logfile). With -logplay, IPython will try to reconstruct the
233 227 previous working environment in full, not just execute the commands in
234 228 the logfile.
235 229
236 230 When a session is restored, logging is automatically turned on
237 231 again with the name of the logfile it was invoked with (it is
238 232 read from the log header). So once you've turned logging on for
239 233 a session, you can quit IPython and reload it as many times as
240 234 you want and it will continue to log its history and restore
241 235 from the beginning every time.
242 236
243 237 Caveats: there are limitations in this option. The history
244 238 variables _i*,_* and _dh don't get restored properly. In the
245 239 future we will try to implement full session saving by writing
246 240 and retrieving a 'snapshot' of the memory state of IPython. But
247 241 our first attempts failed because of inherent limitations of
248 242 Python's Pickle module, so this may have to wait.
249 243
250 244 -[no]messages
251 245 Print messages which IPython collects about its startup
252 246 process (default on).
253 247
254 248 -[no]pdb
255 249 Automatically call the pdb debugger after every uncaught
256 250 exception. If you are used to debugging using pdb, this puts
257 251 you automatically inside of it after any call (either in
258 252 IPython or in code called by it) which triggers an exception
259 253 which goes uncaught.
260 254
261 255 -pydb
262 256 Makes IPython use the third party "pydb" package as debugger,
263 257 instead of pdb. Requires that pydb is installed.
264 258
265 259 -[no]pprint
266 260 ipython can optionally use the pprint (pretty printer) module
267 261 for displaying results. pprint tends to give a nicer display
268 262 of nested data structures. If you like it, you can turn it on
269 263 permanently in your config file (default off).
270 264
271 265 -profile, p <name>
272 266
273 267 assume that your config file is ipythonrc-<name> or
274 268 ipy_profile_<name>.py (looks in current dir first, then in
275 269 IPYTHONDIR). This is a quick way to keep and load multiple
276 270 config files for different tasks, especially if you use the
277 271 include option of config files. You can keep a basic
278 272 IPYTHONDIR/ipythonrc file and then have other 'profiles' which
279 273 include this one and load extra things for particular
280 274 tasks. For example:
281 275
282 276 1. $HOME/.ipython/ipythonrc : load basic things you always want.
283 277 2. $HOME/.ipython/ipythonrc-math : load (1) and basic math-related modules.
284 278 3. $HOME/.ipython/ipythonrc-numeric : load (1) and Numeric and plotting modules.
285 279
286 280 Since it is possible to create an endless loop by having
287 281 circular file inclusions, IPython will stop if it reaches 15
288 282 recursive inclusions.
289 283
290 284 -prompt_in1, pi1 <string>
291 Specify the string used for input prompts. Note that if you
292 are using numbered prompts, the number is represented with a
293 '\#' in the string. Don't forget to quote strings with spaces
294 embedded in them. Default: 'In [\#]:'. Sec. Prompts_
295 discusses in detail all the available escapes to customize
296 your prompts.
285
286 Specify the string used for input prompts. Note that if you are using
287 numbered prompts, the number is represented with a '\#' in the
288 string. Don't forget to quote strings with spaces embedded in
289 them. Default: 'In [\#]:'. The :ref:`prompts section <prompts>`
290 discusses in detail all the available escapes to customize your
291 prompts.
297 292
298 293 -prompt_in2, pi2 <string>
299 294 Similar to the previous option, but used for the continuation
300 295 prompts. The special sequence '\D' is similar to '\#', but
301 296 with all digits replaced by dots (so you can have your
302 297 continuation prompt aligned with your input prompt). Default:
303 298 ' .\D.:' (note three spaces at the start for alignment with
304 299 'In [\#]').
305 300
306 301 -prompt_out,po <string>
307 302 String used for output prompts, also uses numbers like
308 303 prompt_in1. Default: 'Out[\#]:'
309 304
310 305 -quick start in bare bones mode (no config file loaded).
311 306
312 307 -rcfile <name>
313 308 name of your IPython resource configuration file. Normally
314 309 IPython loads ipythonrc (from current directory) or
315 310 IPYTHONDIR/ipythonrc.
316 311
317 312 If the loading of your config file fails, IPython starts with
318 313 a bare bones configuration (no modules loaded at all).
319 314
320 315 -[no]readline
321 316 use the readline library, which is needed to support name
322 317 completion and command history, among other things. It is
323 318 enabled by default, but may cause problems for users of
324 319 X/Emacs in Python comint or shell buffers.
325 320
326 321 Note that X/Emacs 'eterm' buffers (opened with M-x term) support
327 322 IPython's readline and syntax coloring fine, only 'emacs' (M-x
328 323 shell and C-c !) buffers do not.
329 324
330 325 -screen_length, sl <n>
331 326 number of lines of your screen. This is used to control
332 327 printing of very long strings. Strings longer than this number
333 328 of lines will be sent through a pager instead of directly
334 329 printed.
335 330
336 331 The default value for this is 0, which means IPython will
337 332 auto-detect your screen size every time it needs to print certain
338 333 potentially long strings (this doesn't change the behavior of the
339 334 'print' keyword, it's only triggered internally). If for some
340 335 reason this isn't working well (it needs curses support), specify
341 336 it yourself. Otherwise don't change the default.
342 337
343 338 -separate_in, si <string>
344 339
345 340 separator before input prompts.
346 341 Default: '\n'
347 342
348 343 -separate_out, so <string>
349 344 separator before output prompts.
350 345 Default: nothing.
351 346
352 347 -separate_out2, so2
353 348 separator after output prompts.
354 349 Default: nothing.
355 350 For these three options, use the value 0 to specify no separator.
356 351
357 352 -nosep
358 353 shorthand for '-SeparateIn 0 -SeparateOut 0 -SeparateOut2
359 354 0'. Simply removes all input/output separators.
360 355
361 356 -upgrade
362 357 allows you to upgrade your IPYTHONDIR configuration when you
363 358 install a new version of IPython. Since new versions may
364 359 include new command line options or example files, this copies
365 360 updated ipythonrc-type files. However, it backs up (with a
366 361 .old extension) all files which it overwrites so that you can
367 362 merge back any customizations you might have in your personal
368 363 files. Note that you should probably use %upgrade instead,
369 364 it's a safer alternative.
370 365
371 366
372 367 -Version print version information and exit.
373 368
374 369 -wxversion <string>
375 370 Select a specific version of wxPython (used in conjunction
376 371 with -wthread). Requires the wxversion module, part of recent
377 372 wxPython distributions
378 373
379 374 -xmode <modename>
380 375
381 376 Mode for exception reporting.
382 377
383 378 Valid modes: Plain, Context and Verbose.
384 379
385 380 * Plain: similar to python's normal traceback printing.
386 381 * Context: prints 5 lines of context source code around each
387 382 line in the traceback.
388 383 * Verbose: similar to Context, but additionally prints the
389 384 variables currently visible where the exception happened
390 385 (shortening their strings if too long). This can potentially be
391 386 very slow, if you happen to have a huge data structure whose
392 387 string representation is complex to compute. Your computer may
393 388 appear to freeze for a while with cpu usage at 100%. If this
394 389 occurs, you can cancel the traceback with Ctrl-C (maybe hitting it
395 390 more than once).
396 391
397 392 Interactive use
398 393 ===============
399 394
400 395 Warning: IPython relies on the existence of a global variable called
401 396 _ip which controls the shell itself. If you redefine _ip to anything,
402 397 bizarre behavior will quickly occur.
403 398
404 399 Other than the above warning, IPython is meant to work as a drop-in
405 400 replacement for the standard interactive interpreter. As such, any code
406 401 which is valid python should execute normally under IPython (cases where
407 402 this is not true should be reported as bugs). It does, however, offer
408 403 many features which are not available at a standard python prompt. What
409 404 follows is a list of these.
410 405
411 406
412 407 Caution for Windows users
413 408 -------------------------
414 409
415 410 Windows, unfortunately, uses the '\' character as a path
416 411 separator. This is a terrible choice, because '\' also represents the
417 412 escape character in most modern programming languages, including
418 413 Python. For this reason, using '/' character is recommended if you
419 414 have problems with ``\``. However, in Windows commands '/' is used to flag
420 415 options, so you cannot use it for the root directory. This means that
421 416 paths beginning at the root must be typed in a contrived manner like:
422 417 ``%copy \opt/foo/bar.txt \tmp``
423 418
424 419 .. _magic:
425 420
426 421 Magic command system
427 422 --------------------
428 423
429 424 IPython will treat any line whose first character is a % as a special
430 425 call to a 'magic' function. These allow you to control the behavior of
431 426 IPython itself, plus a lot of system-type features. They are all
432 427 prefixed with a % character, but parameters are given without
433 428 parentheses or quotes.
434 429
435 430 Example: typing '%cd mydir' (without the quotes) changes your working
436 431 directory to 'mydir', if it exists.
437 432
438 433 If you have 'automagic' enabled (in your ipythonrc file, via the command
439 434 line option -automagic or with the %automagic function), you don't need
440 435 to type in the % explicitly. IPython will scan its internal list of
441 436 magic functions and call one if it exists. With automagic on you can
442 437 then just type 'cd mydir' to go to directory 'mydir'. The automagic
443 438 system has the lowest possible precedence in name searches, so defining
444 439 an identifier with the same name as an existing magic function will
445 440 shadow it for automagic use. You can still access the shadowed magic
446 441 function by explicitly using the % character at the beginning of the line.
447 442
448 443 An example (with automagic on) should clarify all this::
449 444
450 445 In [1]: cd ipython # %cd is called by automagic
451 446
452 447 /home/fperez/ipython
453 448
454 449 In [2]: cd=1 # now cd is just a variable
455 450
456 451 In [3]: cd .. # and doesn't work as a function anymore
457 452
458 453 ------------------------------
459 454
460 455 File "<console>", line 1
461 456
462 457 cd ..
463 458
464 459 ^
465 460
466 461 SyntaxError: invalid syntax
467 462
468 463 In [4]: %cd .. # but %cd always works
469 464
470 465 /home/fperez
471 466
472 467 In [5]: del cd # if you remove the cd variable
473 468
474 469 In [6]: cd ipython # automagic can work again
475 470
476 471 /home/fperez/ipython
477 472
478 473 You can define your own magic functions to extend the system. The
479 474 following example defines a new magic command, %impall::
480 475
481 476 import IPython.ipapi
482 477
483 478 ip = IPython.ipapi.get()
484 479
485 480 def doimp(self, arg):
486 481
487 482     ip = self.api
488 483
489 484     ip.ex("import %s; reload(%s); from %s import *" % (
490 485
491 486         arg, arg, arg)
492 487
493 488         )
494 489
495 490 ip.expose_magic('impall', doimp)
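
Once a file containing the lines above has been loaded at startup (for example from a configuration module such as ipy_user_conf.py, if your setup uses one), the new magic should behave like any other. A hypothetical session, with the module name chosen purely for illustration::

    In [1]: %impall csv   # imports csv, reloads it, then does 'from csv import *'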
496 491
497 492 You can also define your own aliased names for magic functions. In your
498 493 ipythonrc file, placing a line like:
499 494
500 495 execute __IP.magic_cl = __IP.magic_clear
501 496
502 497 will define %cl as a new name for %clear.
503 498
504 499 Type %magic for more information, including a list of all available
505 500 magic functions at any time and their docstrings. You can also type
506 501 %magic_function_name? (see the section on dynamic object information for
507 502 information on the '?' system) to get information about any particular
508 503 magic function you are interested in.
509 504
510 505
511 506 Magic commands
512 507 --------------
513 508
514 509 The rest of this section is automatically generated for each release
515 510 from the docstrings in the IPython code. Therefore the formatting is
516 511 somewhat minimal, but this method has the advantage of having
517 512 information always in sync with the code.
518 513
519 514 A list of all the magic commands available in IPython's default
520 515 installation follows. This is similar to what you'll see by simply
521 516 typing %magic at the prompt, but that will also give you information
522 517 about magic commands you may have added as part of your personal
523 518 customizations.
524 519
525 520 .. magic_start
526 521
527 522 **%Exit**::
528 523
529 524 Exit IPython without confirmation.
530 525
531 526 **%Pprint**::
532 527
533 528 Toggle pretty printing on/off.
534 529
535 530 **%alias**::
536 531
537 532 Define an alias for a system command.
538 533
539 534 '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'
540 535
541 536 Then, typing 'alias_name params' will execute the system command 'cmd
542 537 params' (from your underlying operating system).
543 538
544 539 Aliases have lower precedence than magic functions and normal Python
545 540 variables, so if 'foo' is both a Python variable and an alias, the
546 541 alias can not be executed until 'del foo' removes the Python variable.
547 542
548 543 You can use the %l specifier in an alias definition to represent the
549 544 whole line when the alias is called. For example:
550 545
551 546 In [2]: alias all echo "Input in brackets: <%l>"\
552 547 In [3]: all hello world\
553 548 Input in brackets: <hello world>
554 549
555 550 You can also define aliases with parameters using %s specifiers (one
556 551 per parameter):
557 552
558 553 In [1]: alias parts echo first %s second %s\
559 554 In [2]: %parts A B\
560 555 first A second B\
561 556 In [3]: %parts A\
562 557 Incorrect number of arguments: 2 expected.\
563 558 parts is an alias to: 'echo first %s second %s'
564 559
565 560 Note that %l and %s are mutually exclusive. You can only use one or
566 561 the other in your aliases.
567 562
568 563 Aliases expand Python variables just like system calls using ! or !!
569 564 do: all expressions prefixed with '$' get expanded. For details of
570 565 the semantic rules, see PEP-215:
571 566 http://www.python.org/peps/pep-0215.html. This is the library used by
572 567 IPython for variable expansion. If you want to access a true shell
573 568 variable, an extra $ is necessary to prevent its expansion by IPython:
574 569
575 570 In [6]: alias show echo\
576 571 In [7]: PATH='A Python string'\
577 572 In [8]: show $PATH\
578 573 A Python string\
579 574 In [9]: show $$PATH\
580 575 /usr/local/lf9560/bin:/usr/local/intel/compiler70/ia32/bin:...
581 576
582 577 You can use the alias facility to access all of $PATH. See the %rehash
583 578 and %rehashx functions, which automatically create aliases for the
584 579 contents of your $PATH.
585 580
586 581 If called with no parameters, %alias prints the current alias table.
587 582
588 583 **%autocall**::
589 584
590 585 Make functions callable without having to type parentheses.
591 586
592 587 Usage:
593 588
594 589 %autocall [mode]
595 590
596 591 The mode can be one of: 0->Off, 1->Smart, 2->Full. If not given, the
597 592 value is toggled on and off (remembering the previous state).
598 593
599 594 In more detail, these values mean:
600 595
601 596 0 -> fully disabled
602 597
603 598 1 -> active, but do not apply if there are no arguments on the line.
604 599
605 600 In this mode, you get:
606 601
607 602 In [1]: callable
608 603 Out[1]: <built-in function callable>
609 604
610 605 In [2]: callable 'hello'
611 606 ------> callable('hello')
612 607 Out[2]: False
613 608
614 609 2 -> Active always. Even if no arguments are present, the callable
615 610 object is called:
616 611
617 612 In [4]: callable
618 613 ------> callable()
619 614
620 615 Note that even with autocall off, you can still use '/' at the start of
621 616 a line to treat the first argument on the command line as a function
622 617 and add parentheses to it:
623 618
624 619 In [8]: /str 43
625 620 ------> str(43)
626 621 Out[8]: '43'
627 622
628 623 **%autoindent**::
629 624
630 625 Toggle autoindent on/off (if available).
631 626
632 627 **%automagic**::
633 628
634 629 Make magic functions callable without having to type the initial %.
635 630
636 631 Without arguments it toggles on/off (when off, you must call it as
637 632 %automagic, of course). With arguments it sets the value, and you can
638 633 use any of (case insensitive):
639 634
640 635 - on,1,True: to activate
641 636
642 637 - off,0,False: to deactivate.
643 638
644 639 Note that magic functions have lowest priority, so if there's a
645 640 variable whose name collides with that of a magic fn, automagic won't
646 641 work for that function (you get the variable instead). However, if you
647 642 delete the variable (del var), the previously shadowed magic function
648 643 becomes visible to automagic again.
649 644
650 645 **%bg**::
651 646
652 647 Run a job in the background, in a separate thread.
653 648
654 649 For example,
655 650
656 651 %bg myfunc(x,y,z=1)
657 652
658 653 will execute 'myfunc(x,y,z=1)' in a background thread. As soon as the
659 654 execution starts, a message will be printed indicating the job
660 655 number. If your job number is 5, you can use
661 656
662 657 myvar = jobs.result(5) or myvar = jobs[5].result
663 658
664 659 to assign this result to variable 'myvar'.
665 660
666 661 IPython has a job manager, accessible via the 'jobs' object. You can
667 662 type jobs? to get more information about it, and use jobs.<TAB> to see
668 663 its attributes. All attributes not starting with an underscore are
669 664 meant for public use.
670 665
671 666 In particular, look at the jobs.new() method, which is used to create
672 667 new jobs. This magic %bg function is just a convenience wrapper
673 668 around jobs.new(), for expression-based jobs. If you want to create a
674 669 new job with an explicit function object and arguments, you must call
675 670 jobs.new() directly.
676 671
677 672 The jobs.new docstring also describes in detail several important
678 673 caveats associated with a thread-based model for background job
679 674 execution. Type jobs.new? for details.
680 675
681 676 You can check the status of all jobs with jobs.status().
682 677
683 678 The jobs variable is set by IPython into the Python builtin namespace.
684 679 If you ever declare a variable named 'jobs', you will shadow this
685 680 name. You can either delete your global jobs variable to regain
686 681 access to the job manager, or make a new name and assign it manually
687 682 to the manager (stored in IPython's namespace). For example, to
688 683 assign the job manager to the Jobs name, use:
689 684
690 685 Jobs = __builtins__.jobs
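
Putting the pieces described above together, a rough sketch of the background-job workflow follows. It assumes jobs.new() accepts a function object followed by its arguments, as suggested above; check jobs.new? for the authoritative calling convention::

    import time

    def slow_square(x):
        # Stand-in for a long-running computation.
        time.sleep(5)
        return x * x

    job = jobs.new(slow_square, 7)   # explicit function object plus argument
    jobs.status()                    # check on all background jobs
    result = job.result              # available once the job has finished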
691 686
692 687 **%bookmark**::
693 688
694 689 Manage IPython's bookmark system.
695 690
696 691 %bookmark <name> - set bookmark to current dir
697 692 %bookmark <name> <dir> - set bookmark to <dir>
698 693 %bookmark -l - list all bookmarks
699 694 %bookmark -d <name> - remove bookmark
700 695 %bookmark -r - remove all bookmarks
701 696
702 697 You can later on access a bookmarked folder with:
703 698 %cd -b <name>
704 699 or simply '%cd <name>' if there is no directory called <name> AND
705 700 there is such a bookmark defined.
706 701
707 702 Your bookmarks persist through IPython sessions, but they are
708 703 associated with each profile.
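
A short hypothetical session tying these pieces together (the path is purely illustrative)::

    In [1]: %bookmark devdir /home/user/project

    In [2]: %cd -b devdir
    /home/user/project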
709 704
710 705 **%cd**::
711 706
712 707 Change the current working directory.
713 708
714 709 This command automatically maintains an internal list of directories
715 710 you visit during your IPython session, in the variable _dh. The
716 711 command %dhist shows this history nicely formatted. You can also
717 712 do 'cd -<tab>' to see directory history conveniently.
718 713
719 714 Usage:
720 715
721 716 cd 'dir': changes to directory 'dir'.
722 717
723 718 cd -: changes to the last visited directory.
724 719
725 720 cd -<n>: changes to the n-th directory in the directory history.
726 721
727 722 cd -b <bookmark_name>: jump to a bookmark set by %bookmark
728 723 (note: cd <bookmark_name> is enough if there is no
729 724 directory <bookmark_name>, but a bookmark with the name exists.)
730 725 'cd -b <tab>' allows you to tab-complete bookmark names.
731 726
732 727 Options:
733 728
734 729 -q: quiet. Do not print the working directory after the cd command is
735 730 executed. By default IPython's cd command does print this directory,
736 731 since the default prompts do not display path information.
737 732
738 733 Note that !cd doesn't work for this purpose because the shell where
739 734 !command runs is immediately discarded after executing 'command'.
740 735
741 736 **%clear**::
742 737
743 738 Clear various data (e.g. stored history data)
744 739
745 740 %clear out - clear output history
746 741 %clear in - clear input history
747 742 %clear shadow_compress - Compresses shadow history (to speed up ipython)
748 743 %clear shadow_nuke - permanently erase all entries in shadow history
749 744 %clear dhist - clear dir history
750 745
751 746 **%color_info**::
752 747
753 748 Toggle color_info.
754 749
755 750 The color_info configuration parameter controls whether colors are
756 751 used for displaying object details (by things like %psource, %pfile or
757 752 the '?' system). This function toggles this value with each call.
758 753
759 754 Note that unless you have a fairly recent pager (less works better
760 755 than more) in your system, using colored object information displays
761 756 will not work properly. Test it and see.
762 757
763 758 **%colors**::
764 759
765 760 Switch color scheme for prompts, info system and exception handlers.
766 761
767 762 Currently implemented schemes: NoColor, Linux, LightBG.
768 763
769 764 Color scheme names are not case-sensitive.
770 765
771 766 **%cpaste**::
772 767
773 768 Allows you to paste & execute a pre-formatted code block from clipboard
774 769
775 770 You must terminate the block with '--' (two minus-signs) alone on the
776 771 line. You can also provide your own sentinel with '%cpaste -s %%' ('%%'
777 772 is the new sentinel for this operation)
778 773
779 774 The block is dedented prior to execution to enable execution of method
780 775 definitions. '>' and '+' characters at the beginning of a line are
781 776 ignored, to allow pasting directly from e-mails or diff files. The
782 777 executed block is also assigned to a variable named 'pasted_block' for
783 778 later editing with '%edit pasted_block'.
784 779
785 780 You can also pass a variable name as an argument, e.g. '%cpaste foo'.
786 781 This assigns the pasted block to variable 'foo' as a string, without
787 782 dedenting or executing it.
788 783
789 784 Do not be alarmed by garbled output on Windows (it's a readline bug).
790 785 Just press enter and type -- (and press enter again) and the block
791 786 will be what was just pasted.
792 787
793 788 IPython statements (magics, shell escapes) are not supported (yet).
794 789
795 790 **%debug**::
796 791
797 792 Activate the interactive debugger in post-mortem mode.
798 793
799 794 If an exception has just occurred, this lets you inspect its stack
800 795 frames interactively. Note that this will always work only on the last
801 796 traceback that occurred, so you must call this quickly after an
802 797 exception that you wish to inspect has fired, because if another one
803 798 occurs, it clobbers the previous one.
804 799
805 800 If you want IPython to automatically do this on every exception, see
806 801 the %pdb magic for more details.
807 802
808 803 **%dhist**::
809 804
810 805 Print your history of visited directories.
811 806
812 807 %dhist -> print full history\
813 808 %dhist n -> print last n entries only\
814 809 %dhist n1 n2 -> print entries between n1 and n2 (n1 not included)\
815 810
816 811 This history is automatically maintained by the %cd command, and
817 812 always available as the global list variable _dh. You can use %cd -<n>
818 813 to go to directory number <n>.
819 814
820 815 Note that most of the time, you should view directory history by entering
821 816 cd -<TAB>.
822 817
823 818 **%dirs**::
824 819
825 820 Return the current directory stack.
826 821
827 822 **%doctest_mode**::
828 823
829 824 Toggle doctest mode on and off.
830 825
831 826 This mode allows you to toggle the prompt behavior between normal
832 827 IPython prompts and ones that are as similar to the default IPython
833 828 interpreter as possible.
834 829
835 830 It also supports the pasting of code snippets that have leading '>>>'
836 831 and '...' prompts in them. This means that you can paste doctests from
837 832 files or docstrings (even if they have leading whitespace), and the
838 833 code will execute correctly. You can then use '%history -tn' to see
839 834 the translated history without line numbers; this will give you the
840 835 input after removal of all the leading prompts and whitespace, which
841 836 can be pasted back into an editor.
842 837
843 838 With these features, you can switch into this mode easily whenever you
844 839 need to do testing and changes to doctests, without having to leave
845 840 your existing IPython session.
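
A rough illustration of the prompt switch (any status messages printed by the magic are omitted, and the session is hypothetical)::

    In [1]: %doctest_mode

    >>> data = [1, 2, 3]
    >>> len(data)
    3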
846 841
847 842 **%ed**::
848 843
849 844 Alias to %edit.
850 845
851 846 **%edit**::
852 847
853 848 Bring up an editor and execute the resulting code.
854 849
855 850 Usage:
856 851 %edit [options] [args]
857 852
858 853 %edit runs IPython's editor hook. The default version of this hook is
859 854 set to call the __IPYTHON__.rc.editor command. This is read from your
860 855 environment variable $EDITOR. If this isn't found, it will default to
861 856 vi under Linux/Unix and to notepad under Windows. See the end of this
862 857 docstring for how to change the editor hook.
863 858
864 859 You can also set the value of this editor via the command line option
865 860 '-editor' or in your ipythonrc file. This is useful if you wish to use
866 861 an editor specifically for IPython that is different from your typical default
867 862 (and for Windows users who typically don't set environment variables).
868 863
869 864 This command allows you to conveniently edit multi-line code right in
870 865 your IPython session.
871 866
872 867 If called without arguments, %edit opens up an empty editor with a
873 868 temporary file and will execute the contents of this file when you
874 869 close it (don't forget to save it!).
875 870
876 871
877 872 Options:
878 873
879 874 -n <number>: open the editor at a specified line number. By default,
880 875 the IPython editor hook uses the unix syntax 'editor +N filename', but
881 876 you can configure this by providing your own modified hook if your
882 877 favorite editor supports line-number specifications with a different
883 878 syntax.
884 879
885 880 -p: this will call the editor with the same data as the previous time
886 881 it was used, regardless of how long ago (in your current session) it
887 882 was.
888 883
889 884 -r: use 'raw' input. This option only applies to input taken from the
890 885 user's history. By default, the 'processed' history is used, so that
891 886 magics are loaded in their transformed version to valid Python. If
892 887 this option is given, the raw input as typed at the command line is
893 888 used instead. When you exit the editor, it will be executed by
894 889 IPython's own processor.
895 890
896 891 -x: do not execute the edited code immediately upon exit. This is
897 892 mainly useful if you are editing programs which need to be called with
898 893 command line arguments, which you can then do using %run.
899 894
900 895
901 896 Arguments:
902 897
903 898 If arguments are given, the following possibilities exist:
904 899
905 900 - The arguments are numbers or pairs of colon-separated numbers (like
906 901 1 4:8 9). These are interpreted as lines of previous input to be
907 902 loaded into the editor. The syntax is the same as the %macro command.
908 903
909 904 - If the argument doesn't start with a number, it is evaluated as a
910 905 variable and its contents loaded into the editor. You can thus edit
911 906 any string which contains python code (including the result of
912 907 previous edits).
913 908
914 909 - If the argument is the name of an object (other than a string),
915 910 IPython will try to locate the file where it was defined and open the
916 911 editor at the point where it is defined. You can use `%edit function`
917 912 to load an editor exactly at the point where 'function' is defined,
918 913 edit it and have the file be executed automatically.
919 914
920 915 If the object is a macro (see %macro for details), this opens up your
921 916 specified editor with a temporary file containing the macro's data.
922 917 Upon exit, the macro is reloaded with the contents of the file.
923 918
924 919 Note: opening at an exact line is only supported under Unix, and some
925 920 editors (like kedit and gedit up to Gnome 2.8) do not understand the
926 921 '+NUMBER' parameter necessary for this feature. Good editors like
927 922 (X)Emacs, vi, jed, pico and joe all do.
928 923
929 924 - If the argument is not found as a variable, IPython will look for a
930 925 file with that name (adding .py if necessary) and load it into the
931 926 editor. It will execute its contents with execfile() when you exit,
932 927 loading any code in the file into your interactive namespace.
933 928
934 929 After executing your code, %edit will return as output the code you
935 930 typed in the editor (except when it was an existing file). This way
936 931 you can reload the code in further invocations of %edit as a variable,
937 932 via _<NUMBER> or Out[<NUMBER>], where <NUMBER> is the prompt number of
938 933 the output.
939 934
940 935 Note that %edit is also available through the alias %ed.
941 936
942 937 This is an example of creating a simple function inside the editor and
943 938 then modifying it. First, start up the editor:
944 939
945 940 In [1]: ed\
946 941 Editing... done. Executing edited code...\
947 942 Out[1]: 'def foo():\n print "foo() was defined in an editing session"\n'
948 943
949 944 We can then call the function foo():
950 945
951 946 In [2]: foo()\
952 947 foo() was defined in an editing session
953 948
954 949 Now we edit foo. IPython automatically loads the editor with the
955 950 (temporary) file where foo() was previously defined:
956 951
957 952 In [3]: ed foo\
958 953 Editing... done. Executing edited code...
959 954
960 955 And if we call foo() again we get the modified version:
961 956
962 957 In [4]: foo()\
963 958 foo() has now been changed!
964 959
965 960 Here is an example of how to edit a code snippet several successive
966 961 times. First we call the editor:
967 962
968 963 In [8]: ed\
969 964 Editing... done. Executing edited code...\
970 965 hello\
971 966 Out[8]: "print 'hello'\n"
972 967
973 968 Now we call it again with the previous output (stored in _):
974 969
975 970 In [9]: ed _\
976 971 Editing... done. Executing edited code...\
977 972 hello world\
978 973 Out[9]: "print 'hello world'\n"
979 974
980 975 Now we call it with the output #8 (stored in _8, also as Out[8]):
981 976
982 977 In [10]: ed _8\
983 978 Editing... done. Executing edited code...\
984 979 hello again\
985 980 Out[10]: "print 'hello again'\n"
986 981
987 982
988 983 Changing the default editor hook:
989 984
990 985 If you wish to write your own editor hook, you can put it in a
991 986 configuration file which you load at startup time. The default hook
992 987 is defined in the IPython.hooks module, and you can use that as a
993 988 starting example for further modifications. That file also has
994 989 general instructions on how to set a new hook for use once you've
995 990 defined it.
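
As a hedged sketch only (the canonical hook and its exact signature live in IPython.hooks and should be checked there), a custom editor hook registered through the ipapi interface used earlier in this manual might look like this::

    import os
    import IPython.ipapi

    ip = IPython.ipapi.get()

    def my_editor(self, filename, linenum=None):
        # Assumed signature (self, filename, linenum), mirroring the default
        # hook: open gvim, jumping to the given line when one is supplied.
        line_arg = ''
        if linenum is not None:
            line_arg = '+%d ' % linenum
        os.system('gvim %s%s' % (line_arg, filename))

    ip.set_hook('editor', my_editor)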
996 991
997 992 **%env**::
998 993
999 994 List environment variables.
1000 995
1001 996 **%exit**::
1002 997
1003 998 Exit IPython, confirming if configured to do so.
1004 999
1005 1000 You can configure whether IPython asks for confirmation upon exit by
1006 1001 setting the confirm_exit flag in the ipythonrc file.
1007 1002
1008 1003 **%hist**::
1009 1004
1010 1005 Alternate name for %history.
1011 1006
1012 1007 **%history**::
1013 1008
1014 1009 Print input history (_i<n> variables), with most recent last.
1015 1010
1016 1011 %history -> print at most 40 inputs (some may be multi-line)\
1017 1012 %history n -> print at most n inputs\
1018 1013 %history n1 n2 -> print inputs between n1 and n2 (n2 not included)\
1019 1014
1020 1015 Each input's number <n> is shown, and is accessible as the
1021 1016 automatically generated variable _i<n>. Multi-line statements are
1022 1017 printed starting at a new line for easy copy/paste.
1023 1018
1024 1019
1025 1020 Options:
1026 1021
1027 1022 -n: do NOT print line numbers. This is useful if you want to get a
1028 1023 printout of many lines which can be directly pasted into a text
1029 1024 editor.
1030 1025
1031 1026 This feature is only available if numbered prompts are in use.
1032 1027
1033 1028 -t: (default) print the 'translated' history, as IPython understands it.
1034 1029 IPython filters your input and converts it all into valid Python source
1035 1030 before executing it (things like magics or aliases are turned into
1036 1031 function calls, for example). With this option, you'll see the native
1037 1032 history instead of the user-entered version: '%cd /' will be seen as
1038 1033 '_ip.magic("%cd /")' instead of '%cd /'.
1039 1034
1040 1035 -r: print the 'raw' history, i.e. the actual commands you typed.
1041 1036
1042 1037 -g: treat the arg as a pattern to grep for in (full) history.
1043 1038 This includes the "shadow history" (almost all commands ever written).
1044 1039 Use '%hist -g' to show full shadow history (may be very long).
1045 1040 In shadow history, every index number starts with 0.
1046 1041
1047 1042 -f FILENAME: instead of printing the output to the screen, redirect it to
1048 1043 the given file. The file is always overwritten, though IPython asks for
1049 1044 confirmation first if it already exists.
1050 1045
1051 1046 **%logoff**::
1052 1047
1053 1048 Temporarily stop logging.
1054 1049
1055 1050 You must have previously started logging.
1056 1051
1057 1052 **%logon**::
1058 1053
1059 1054 Restart logging.
1060 1055
1061 1056 This function is for restarting logging which you've temporarily
1062 1057 stopped with %logoff. For starting logging for the first time, you
1063 1058 must use the %logstart function, which allows you to specify an
1064 1059 optional log filename.
1065 1060
1066 1061 **%logstart**::
1067 1062
1068 1063 Start logging anywhere in a session.
1069 1064
1070 1065 %logstart [-o|-r|-t] [log_name [log_mode]]
1071 1066
1072 1067 If no name is given, it defaults to a file named 'ipython_log.py' in your
1073 1068 current directory, in 'rotate' mode (see below).
1074 1069
1075 1070 '%logstart name' saves to file 'name' in 'backup' mode. It saves your
1076 1071 history up to that point and then continues logging.
1077 1072
1078 1073 %logstart takes a second optional parameter: logging mode. This can be one
1079 1074 of (note that the modes are given unquoted):\
1080 1075 append: well, that says it.\
1081 1076 backup: rename (if exists) to name~ and start name.\
1082 1077 global: single logfile in your home dir, appended to.\
1083 1078 over : overwrite existing log.\
1084 1079 rotate: create rotating logs name.1~, name.2~, etc.
1085 1080
1086 1081 Options:
1087 1082
1088 1083 -o: log also IPython's output. In this mode, all commands which
1089 1084 generate an Out[NN] prompt are recorded to the logfile, right after
1090 1085 their corresponding input line. The output lines are always
1091 1086 prepended with a '#[Out]# ' marker, so that the log remains valid
1092 1087 Python code.
1093 1088
1094 1089 Since this marker is always the same, filtering only the output from
1095 1090 a log is very easy, using for example a simple awk call:
1096 1091
1097 1092 awk -F'#\[Out\]# ' '{if($2) {print $2}}' ipython_log.py
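
    or, on platforms without awk, a small equivalent Python snippet relying
    only on the '#[Out]# ' marker described above (logfile name assumed to be
    the default)::

        import sys
        marker = '#[Out]# '
        for line in open('ipython_log.py'):
            if line.startswith(marker):
                sys.stdout.write(line[len(marker):])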
1098 1093
1099 1094 -r: log 'raw' input. Normally, IPython's logs contain the processed
1100 1095 input, so that user lines are logged in their final form, converted
1101 1096 into valid Python. For example, %Exit is logged as
1102 1097 '_ip.magic("Exit")'. If the -r flag is given, all input is logged
1103 1098 exactly as typed, with no transformations applied.
1104 1099
1105 1100 -t: put timestamps before each input line logged (these are put in
1106 1101 comments).
1107 1102
1108 1103 **%logstate**::
1109 1104
1110 1105 Print the status of the logging system.
1111 1106
1112 1107 **%logstop**::
1113 1108
1114 1109 Fully stop logging and close log file.
1115 1110
1116 1111 In order to start logging again, a new %logstart call needs to be made,
1117 1112 possibly (though not necessarily) with a new filename, mode and other
1118 1113 options.
1119 1114
1120 1115 **%lsmagic**::
1121 1116
1122 1117 List currently available magic functions.
1123 1118
1124 1119 **%macro**::
1125 1120
1126 1121 Define a set of input lines as a macro for future re-execution.
1127 1122
1128 1123 Usage:\
1129 1124 %macro [options] name n1-n2 n3-n4 ... n5 .. n6 ...
1130 1125
1131 1126 Options:
1132 1127
1133 1128 -r: use 'raw' input. By default, the 'processed' history is used,
1134 1129 so that magics are loaded in their transformed version to valid
1135 1130 Python. If this option is given, the raw input as typed at the
1136 1131 command line is used instead.
1137 1132
1138 1133 This will define a global variable called `name` which is a string
1139 1134 made of joining the slices and lines you specify (n1,n2,... numbers
1140 1135 above) from your input history into a single string. This variable
1141 1136 acts like an automatic function which re-executes those lines as if
1142 1137 you had typed them. You just type 'name' at the prompt and the code
1143 1138 executes.
1144 1139
1145 1140 The notation for indicating number ranges is: n1-n2 means 'use line
1146 1141 numbers n1,...n2' (the endpoint is included). That is, '5-7' means
1147 1142 using the lines numbered 5,6 and 7.
1148 1143
1149 1144 Note: as a 'hidden' feature, you can also use traditional python slice
1150 1145 notation, where N:M means numbers N through M-1.
1151 1146
1152 1147 For example, if your history contains (%hist prints it):
1153 1148
1154 1149 44: x=1\
1155 1150 45: y=3\
1156 1151 46: z=x+y\
1157 1152 47: print x\
1158 1153 48: a=5\
1159 1154 49: print 'x',x,'y',y\
1160 1155
1161 1156 you can create a macro with lines 44 through 47 (included) and line 49
1162 1157 called my_macro with:
1163 1158
1164 1159 In [51]: %macro my_macro 44-47 49
1165 1160
1166 1161 Now, typing `my_macro` (without quotes) will re-execute all this code
1167 1162 in one pass.
1168 1163
1169 1164 You don't need to give the line-numbers in order, and any given line
1170 1165 number can appear multiple times. You can assemble macros with any
1171 1166 lines from your input history in any order.
1172 1167
1173 1168 The macro is a simple object which holds its value in an attribute,
1174 1169 but IPython's display system checks for macros and executes them as
1175 1170 code instead of printing them when you type their name.
1176 1171
1177 1172 You can view a macro's contents by explicitly printing it with:
1178 1173
1179 1174 'print macro_name'.
1180 1175
1181 1176 For one-off cases which DON'T contain magic function calls in them you
1182 1177 can obtain similar results by explicitly executing slices from your
1183 1178 input history with:
1184 1179
1185 1180 In [60]: exec In[44:48]+In[49]
1186 1181
1187 1182 **%magic**::
1188 1183
1189 1184 Print information about the magic function system.
1190 1185
1191 1186 **%mglob**::
1192 1187
1193 1188 This program allows specifying filenames with "mglob" mechanism.
1194 1189 Supported syntax in globs (wildcard matching patterns)::
1195 1190
1196 1191 *.cpp ?ellowo*
1197 1192 - obvious. Differs from normal glob in that dirs are not included.
1198 1193 Unix users might want to write this as: "*.cpp" "?ellowo*"
1199 1194 rec:/usr/share=*.txt,*.doc
1200 1195 - get all *.txt and *.doc under /usr/share,
1201 1196 recursively
1202 1197 rec:/usr/share
1203 1198 - All files under /usr/share, recursively
1204 1199 rec:*.py
1205 1200 - All .py files under current working dir, recursively
1206 1201 foo
1207 1202 - File or dir foo
1208 1203 !*.bak readme*
1209 1204 - readme*, exclude files ending with .bak
1210 1205 !.svn/ !.hg/ !*_Data/ rec:.
1211 1206 - Skip .svn, .hg, foo_Data dirs (and their subdirs) in recurse.
1212 1207 Trailing / is the key, \ does not work!
1213 1208 dir:foo
1214 1209 - the directory foo if it exists (not files in foo)
1215 1210 dir:*
1216 1211 - all directories in current folder
1217 1212 foo.py bar.* !h* rec:*.py
1218 1213 - Obvious. !h* exclusion only applies for rec:*.py.
1219 1214 foo.py is *not* included twice.
1220 1215 @filelist.txt
1221 1216 - All files listed in 'filelist.txt' file, on separate lines.
1222 1217
1223 1218 **%page**::
1224 1219
1225 1220 Pretty print the object and display it through a pager.
1226 1221
1227 1222 %page [options] OBJECT
1228 1223
1229 1224 If no object is given, use _ (last output).
1230 1225
1231 1226 Options:
1232 1227
1233 1228 -r: page str(object), don't pretty-print it.
1234 1229
1235 1230 **%pdb**::
1236 1231
1237 1232 Control the automatic calling of the pdb interactive debugger.
1238 1233
1239 1234 Call as '%pdb on', '%pdb 1', '%pdb off' or '%pdb 0'. If called without
1240 1235 argument it works as a toggle.
1241 1236
1242 1237 When an exception is triggered, IPython can optionally call the
1243 1238 interactive pdb debugger after the traceback printout. %pdb toggles
1244 1239 this feature on and off.
1245 1240
1246 1241 The initial state of this feature is set in your ipythonrc
1247 1242 configuration file (the variable is called 'pdb').
1248 1243
1249 1244 If you want to just activate the debugger AFTER an exception has fired,
1250 1245 without having to type '%pdb on' and rerunning your code, you can use
1251 1246 the %debug magic.
1252 1247
1253 1248 **%pdef**::
1254 1249
1255 1250 Print the definition header for any callable object.
1256 1251
1257 1252 If the object is a class, print the constructor information.
1258 1253
1259 1254 **%pdoc**::
1260 1255
1261 1256 Print the docstring for an object.
1262 1257
1263 1258 If the given object is a class, it will print both the class and the
1264 1259 constructor docstrings.
1265 1260
1266 1261 **%pfile**::
1267 1262
1268 1263 Print (or run through pager) the file where an object is defined.
1269 1264
1270 1265 The file opens at the line where the object definition begins. IPython
1271 1266 will honor the environment variable PAGER if set, and otherwise will
1272 1267 do its best to print the file in a convenient form.
1273 1268
1274 1269 If the given argument is not an object currently defined, IPython will
1275 1270 try to interpret it as a filename (automatically adding a .py extension
1276 1271 if needed). You can thus use %pfile as a syntax highlighting code
1277 1272 viewer.
1278 1273
1279 1274 **%pinfo**::
1280 1275
1281 1276 Provide detailed information about an object.
1282 1277
1283 1278 '%pinfo object' is just a synonym for object? or ?object.
1284 1279
1285 1280 **%popd**::
1286 1281
1287 1282 Change to directory popped off the top of the stack.
1288 1283
1289 1284 **%profile**::
1290 1285
1291 1286 Print your currently active IPython profile.
1292 1287
1293 1288 **%prun**::
1294 1289
1295 1290 Run a statement through the python code profiler.
1296 1291
1297 1292 Usage:\
1298 1293 %prun [options] statement
1299 1294
1300 1295 The given statement (which doesn't require quote marks) is run via the
1301 1296 python profiler in a manner similar to the profile.run() function.
1302 1297 Namespaces are internally managed to work correctly; profile.run
1303 1298 cannot be used in IPython because it makes certain assumptions about
1304 1299 namespaces which do not hold under IPython.
1305 1300
1306 1301 Options:
1307 1302
1308 1303 -l <limit>: you can place restrictions on what or how much of the
1309 1304 profile gets printed. The limit value can be:
1310 1305
1311 1306 * A string: only information for function names containing this string
1312 1307 is printed.
1313 1308
1314 1309 * An integer: only this many lines are printed.
1315 1310
1316 1311 * A float (between 0 and 1): this fraction of the report is printed
1317 1312 (for example, use a limit of 0.4 to see the topmost 40% only).
1318 1313
1319 1314 You can combine several limits with repeated use of the option. For
1320 1315 example, '-l __init__ -l 5' will print only the topmost 5 lines of
1321 1316 information about class constructors.
1322 1317
1323 1318 -r: return the pstats.Stats object generated by the profiling. This
1324 1319 object has all the information about the profile in it, and you can
1325 1320 later use it for further analysis or in other functions.
1326 1321
1327 1322 -s <key>: sort profile by given key. You can provide more than one key
1328 1323 by using the option several times: '-s key1 -s key2 -s key3...'. The
1329 1324 default sorting key is 'time'.
1330 1325
1331 1326 The following is copied verbatim from the profile documentation
1332 1327 referenced below:
1333 1328
1334 1329 When more than one key is provided, additional keys are used as
1335 1330 secondary criteria when there is equality in all keys selected
1336 1331 before them.
1337 1332
1338 1333 Abbreviations can be used for any key names, as long as the
1339 1334 abbreviation is unambiguous. The following are the keys currently
1340 1335 defined:
1341 1336
1342 1337 Valid Arg Meaning\
1343 1338 "calls" call count\
1344 1339 "cumulative" cumulative time\
1345 1340 "file" file name\
1346 1341 "module" file name\
1347 1342 "pcalls" primitive call count\
1348 1343 "line" line number\
1349 1344 "name" function name\
1350 1345 "nfl" name/file/line\
1351 1346 "stdname" standard name\
1352 1347 "time" internal time
1353 1348
1354 1349 Note that all sorts on statistics are in descending order (placing
1355 1350 most time consuming items first), whereas name, file, and line number
1356 1351 searches are in ascending order (i.e., alphabetical). The subtle
1357 1352 distinction between "nfl" and "stdname" is that the standard name is a
1358 1353 sort of the name as printed, which means that the embedded line
1359 1354 numbers get compared in an odd way. For example, lines 3, 20, and 40
1360 1355 would (if the file names were the same) appear in the string order
1361 1356 "20" "3" and "40". In contrast, "nfl" does a numeric compare of the
1362 1357 line numbers. In fact, sort_stats("nfl") is the same as
1363 1358 sort_stats("name", "file", "line").
1364 1359
1365 1360 -T <filename>: save profile results as shown on screen to a text
1366 1361 file. The profile is still shown on screen.
1367 1362
1368 1363 -D <filename>: save (via dump_stats) profile statistics to given
1369 1364 filename. This data is in a format understood by the pstats module, and
1370 1365 is generated by a call to the dump_stats() method of profile
1371 1366 objects. The profile is still shown on screen.
1372 1367
1373 1368 If you want to run complete programs under the profiler's control, use
1374 1369 '%run -p [prof_opts] filename.py [args to program]' where prof_opts
1375 1370 contains profiler specific options as described here.
1376 1371
1377 1372 You can read the complete documentation for the profile module with:\
1378 1373 In [1]: import profile; profile.help()
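
    As a concrete illustration, a typical invocation might look like the
    following sketch (my_calculation and data are placeholders for your
    own code, and the option values are arbitrary):

    In [2]: %prun -l 10 -s cumulative my_calculation(data)
    In [3]: %prun -T profile_report.txt -s time my_calculation(data)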
1379 1374
1380 1375 **%psearch**::
1381 1376
1382 1377 Search for object in namespaces by wildcard.
1383 1378
1384 1379 %psearch [options] PATTERN [OBJECT TYPE]
1385 1380
1386 1381 Note: ? can be used as a synonym for %psearch, at the beginning or at
1387 1382 the end: both a*? and ?a* are equivalent to '%psearch a*'. Still, the
1388 1383 rest of the command line must be unchanged (options come first), so
1389 1384 for example the following forms are equivalent
1390 1385
1391 1386 %psearch -i a* function
1392 1387 -i a* function?
1393 1388 ?-i a* function
1394 1389
1395 1390 Arguments:
1396 1391
1397 1392 PATTERN
1398 1393
1399 1394 where PATTERN is a string containing * as a wildcard similar to its
1400 1395 use in a shell. The pattern is matched in all namespaces on the
1401 1396 search path. By default objects starting with a single _ are not
1402 1397 matched; many IPython-generated objects have a single
1403 1398 underscore. The default is case insensitive matching. Matching is
1404 1399 also done on the attributes of objects and not only on the objects
1405 1400 in a module.
1406 1401
1407 1402 [OBJECT TYPE]
1408 1403
1409 1404 Is the name of a python type from the types module. The name is
1410 1405 given in lowercase without the ending type, ex. StringType is
1411 1406 written string. By adding a type here only objects matching the
1412 1407 given type are matched. Using all here makes the pattern match all
1413 1408 types (this is the default).
1414 1409
1415 1410 Options:
1416 1411
1417 1412 -a: makes the pattern match even objects whose names start with a
1418 1413 single underscore. These names are normally omitted from the
1419 1414 search.
1420 1415
1421 1416 -i/-c: make the pattern case insensitive/sensitive. If neither of
1422 1417 these options is given, the default is read from your ipythonrc
1423 1418 file. The option name which sets this value is
1424 1419 'wildcards_case_sensitive'. If this option is not specified in your
1425 1420 ipythonrc file, IPython's internal default is to do a case sensitive
1426 1421 search.
1427 1422
1428 1423 -e/-s NAMESPACE: exclude/search a given namespace. The pattern you
1429 1424 specify can be searched in any of the following namespaces:
1430 1425 'builtin', 'user', 'user_global','internal', 'alias', where
1431 1426 'builtin' and 'user' are the search defaults. Note that you should
1432 1427 not use quotes when specifying namespaces.
1433 1428
1434 1429 'Builtin' contains the python module builtin, 'user' contains all
1435 1430 user data, 'alias' only contains the shell aliases and no python
1436 1431 objects, 'internal' contains objects used by IPython. The
1437 1432 'user_global' namespace is only used by embedded IPython instances,
1438 1433 and it contains module-level globals. You can add namespaces to the
1439 1434 search with -s or exclude them with -e (these options can be given
1440 1435 more than once).
1441 1436
1442 1437 Examples:
1443 1438
1444 1439 %psearch a* -> objects beginning with an a
1445 1440 %psearch -e builtin a* -> objects NOT in the builtin space starting in a
1446 1441 %psearch a* function -> all functions beginning with an a
1447 1442 %psearch re.e* -> objects beginning with an e in module re
1448 1443 %psearch r*.e* -> objects that start with e in modules starting in r
1449 1444 %psearch r*.* string -> all strings in modules beginning with r
1450 1445
1451 1446 Case sensitive search:
1452 1447
1453 1448 %psearch -c a* list all objects beginning with lower case a
1454 1449
1455 1450 Show objects beginning with a single _:
1456 1451
1457 1452 %psearch -a _* list objects beginning with a single underscore
1458 1453
1459 1454 **%psource**::
1460 1455
1461 1456 Print (or run through pager) the source code for an object.
1462 1457
1463 1458 **%pushd**::
1464 1459
1465 1460 Place the current dir on stack and change directory.
1466 1461
1467 1462 Usage:\
1468 1463 %pushd ['dirname']
1469 1464
1470 1465 **%pwd**::
1471 1466
1472 1467 Return the current working directory path.
1473 1468
1474 1469 **%pycat**::
1475 1470
1476 1471 Show a syntax-highlighted file through a pager.
1477 1472
1478 1473 This magic is similar to the cat utility, but it will assume the file
1479 1474 to be Python source and will show it with syntax highlighting.
1480 1475
1481 1476 **%quickref**::
1482 1477
1483 1478 Show a quick reference sheet
1484 1479
1485 1480 **%quit**::
1486 1481
1487 1482 Exit IPython, confirming if configured to do so (like %exit)
1488 1483
1489 1484 **%r**::
1490 1485
1491 1486 Repeat previous input.
1492 1487
1493 1488 Note: Consider using the more powerful %rep instead!
1494 1489
1495 1490 If given an argument, repeats the previous command which starts with
1496 1491 the same string, otherwise it just repeats the previous input.
1497 1492
1498 1493 Shell escaped commands (with ! as first character) are not recognized
1499 1494 by this system, only pure python code and magic commands.
1500 1495
1501 1496 **%rehashdir**::
1502 1497
1503 1498 Add executables in all specified dirs to alias table
1504 1499
1505 1500 Usage:
1506 1501
1507 1502 %rehashdir c:/bin;c:/tools
1508 1503 - Add all executables under c:/bin and c:/tools to alias table, in
1509 1504 order to make them directly executable from any directory.
1510 1505
1511 1506 Without arguments, add all executables in current directory.
1512 1507
1513 1508 **%rehashx**::
1514 1509
1515 1510 Update the alias table with all executable files in $PATH.
1516 1511
1517 1512 This version explicitly checks that every entry in $PATH is a file
1518 1513 with execute access (os.X_OK), so it is much slower than %rehash.
1519 1514
1520 1515 Under Windows, it checks executability as a match against a
1521 1516 '|'-separated string of extensions, stored in the IPython config
1522 1517 variable win_exec_ext. This defaults to 'exe|com|bat'.
1523 1518
1524 1519 This function also resets the root module cache of the module completer,
1525 1520 used on slow filesystems.
1526 1521
1527 1522 **%rep**::
1528 1523
1529 1524 Repeat a command, or get command to input line for editing
1530 1525
1531 1526 - %rep (no arguments):
1532 1527
1533 1528 Place a string version of last computation result (stored in the special '_'
1534 1529 variable) to the next input prompt. Allows you to create elaborate command
1535 1530 lines without using copy-paste::
1536 1531
1537 1532 $ l = ["hei", "vaan"]
1538 1533 $ "".join(l)
1539 1534 ==> heivaan
1540 1535 $ %rep
1541 1536 $ heivaan_ <== cursor blinking
1542 1537
1543 1538 %rep 45
1544 1539
1545 1540 Place history line 45 to next input prompt. Use %hist to find out the
1546 1541 number.
1547 1542
1548 1543 %rep 1-4 6-7 3
1549 1544
1550 1545 Repeat the specified lines immediately. Input slice syntax is the same as
1551 1546 in %macro and %save.
1552 1547
1553 1548 %rep foo
1554 1549
1555 1550 Place the most recent line that has the substring "foo" to next input.
1556 1551 (e.g. 'svn ci -m foobar').
1557 1552
1558 1553 **%reset**::
1559 1554
1560 1555 Resets the namespace by removing all names defined by the user.
1561 1556
1562 1557 Input/Output history are left around in case you need them.
1563 1558
1564 1559 **%run**::
1565 1560
1566 1561 Run the named file inside IPython as a program.
1567 1562
1568 1563 Usage:\
1569 1564 %run [-n -i -t [-N<N>] -d [-b<N>] -p [profile options]] file [args]
1570 1565
1571 1566 Parameters after the filename are passed as command-line arguments to
1572 1567 the program (put in sys.argv). Then, control returns to IPython's
1573 1568 prompt.
1574 1569
1575 1570 This is similar to running at a system prompt:\
1576 1571 $ python file args\
1577 1572 but with the advantage of giving you IPython's tracebacks, and of
1578 1573 loading all variables into your interactive namespace for further use
1579 1574 (unless -p is used, see below).
1580 1575
1581 1576 The file is executed in a namespace initially consisting only of
1582 1577 __name__=='__main__' and sys.argv constructed as indicated. It thus
1583 1578 sees its environment as if it were being run as a stand-alone program
1584 1579 (except for sharing global objects such as previously imported
1585 1580 modules). But after execution, the IPython interactive namespace gets
1586 1581 updated with all variables defined in the program (except for __name__
1587 1582 and sys.argv). This allows for very convenient loading of code for
1588 1583 interactive work, while giving each program a 'clean sheet' to run in.
1589 1584
1590 1585 Options:
1591 1586
1592 1587 -n: __name__ is NOT set to '__main__', but to the running file's name
1593 1588 without extension (as python does under import). This allows running
1594 1589 scripts and reloading the definitions in them without calling code
1595 1590 protected by an ' if __name__ == "__main__" ' clause.
1596 1591
1597 1592 -i: run the file in IPython's namespace instead of an empty one. This
1598 1593 is useful if you are experimenting with code written in a text editor
1599 1594 which depends on variables defined interactively.
1600 1595
1601 1596 -e: ignore sys.exit() calls or SystemExit exceptions in the script
1602 1597 being run. This is particularly useful if IPython is being used to
1603 1598 run unittests, which always exit with a sys.exit() call. In such
1604 1599 cases you are interested in the output of the test results, not in
1605 1600 seeing a traceback of the unittest module.
1606 1601
1607 1602 -t: print timing information at the end of the run. IPython will give
1608 1603 you an estimated CPU time consumption for your script, which under
1609 1604 Unix uses the resource module to avoid the wraparound problems of
1610 1605 time.clock(). Under Unix, an estimate of time spent on system tasks
1611 1606 is also given (for Windows platforms this is reported as 0.0).
1612 1607
1613 1608 If -t is given, an additional -N<N> option can be given, where <N>
1614 1609 must be an integer indicating how many times you want the script to
1615 1610 run. The final timing report will include total and per run results.
1616 1611
1617 1612 For example (testing the script uniq_stable.py):
1618 1613
1619 1614 In [1]: run -t uniq_stable
1620 1615
1621 1616 IPython CPU timings (estimated):\
1622 1617 User : 0.19597 s.\
1623 1618 System: 0.0 s.\
1624 1619
1625 1620 In [2]: run -t -N5 uniq_stable
1626 1621
1627 1622 IPython CPU timings (estimated):\
1628 1623 Total runs performed: 5\
1629 1624 Times : Total Per run\
1630 1625 User : 0.910862 s, 0.1821724 s.\
1631 1626 System: 0.0 s, 0.0 s.
1632 1627
1633 1628 -d: run your program under the control of pdb, the Python debugger.
1634 1629 This allows you to execute your program step by step, watch variables,
1635 1630 etc. Internally, what IPython does is similar to calling:
1636 1631
1637 1632 pdb.run('execfile("YOURFILENAME")')
1638 1633
1639 1634 with a breakpoint set on line 1 of your file. You can change the line
1640 1635 number for this automatic breakpoint to be <N> by using the -bN option
1641 1636 (where N must be an integer). For example:
1642 1637
1643 1638 %run -d -b40 myscript
1644 1639
1645 1640 will set the first breakpoint at line 40 in myscript.py. Note that
1646 1641 the first breakpoint must be set on a line which actually does
1647 1642 something (not a comment or docstring) for it to stop execution.
1648 1643
1649 1644 When the pdb debugger starts, you will see a (Pdb) prompt. You must
1650 1645 first enter 'c' (without quotes) to start execution up to the first
1651 1646 breakpoint.
1652 1647
1653 1648 Entering 'help' gives information about the use of the debugger. You
1654 1649 can easily see pdb's full documentation with "import pdb;pdb.help()"
1655 1650 at a prompt.
1656 1651
1657 1652 -p: run program under the control of the Python profiler module (which
1658 1653 prints a detailed report of execution times, function calls, etc).
1659 1654
1660 1655 You can pass other options after -p which affect the behavior of the
1661 1656 profiler itself. See the docs for %prun for details.
1662 1657
1663 1658 In this mode, the program's variables do NOT propagate back to the
1664 1659 IPython interactive namespace (because they remain in the namespace
1665 1660 where the profiler executes them).
1666 1661
1667 1662 Internally this triggers a call to %prun, see its documentation for
1668 1663 details on the options available specifically for profiling.
1669 1664
1670 1665 There is one special usage for which the text above doesn't apply:
1671 1666 if the filename ends with .ipy, the file is run as an IPython script,
1672 1667 just as if the commands were written at the IPython prompt.
1673 1668
1674 1669 **%runlog**::
1675 1670
1676 1671 Run files as logs.
1677 1672
1678 1673 Usage:\
1679 1674 %runlog file1 file2 ...
1680 1675
1681 1676 Run the named files (treating them as log files) in sequence inside
1682 1677 the interpreter, and return to the prompt. This is much slower than
1683 1678 %run because each line is executed in a try/except block, but it
1684 1679 allows running files with syntax errors in them.
1685 1680
1686 1681 Normally IPython will guess when a file is one of its own logfiles, so
1687 1682 you can typically use %run even for logs. This shorthand allows you to
1688 1683 force any file to be treated as a log file.
1689 1684
1690 1685 **%save**::
1691 1686
1692 1687 Save a set of lines to a given filename.
1693 1688
1694 1689 Usage:\
1695 1690 %save [options] filename n1-n2 n3-n4 ... n5 .. n6 ...
1696 1691
1697 1692 Options:
1698 1693
1699 1694 -r: use 'raw' input. By default, the 'processed' history is used,
1700 1695 so that magics are loaded in their transformed version to valid
1701 1696 Python. If this option is given, the raw input as typed as the
1702 1697 command line is used instead.
1703 1698
1704 1699 This function uses the same syntax as %macro for line extraction, but
1705 1700 instead of creating a macro it saves the resulting string to the
1706 1701 filename you specify.
1707 1702
1708 1703 It adds a '.py' extension to the file if you don't do so yourself, and
1709 1704 it asks for confirmation before overwriting existing files.
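
    For example, a minimal sketch (the filename and line ranges are
    arbitrary):

    In [10]: %save my_session 1-4 8
    In [11]: %save -r raw_session 1-8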
1710 1705
1711 1706 **%sc**::
1712 1707
1713 1708 Shell capture - execute a shell command and capture its output.
1714 1709
1715 1710 DEPRECATED. Suboptimal, retained for backwards compatibility.
1716 1711
1717 1712 You should use the form 'var = !command' instead. Example:
1718 1713
1719 1714 "%sc -l myfiles = ls ~" should now be written as
1720 1715
1721 1716 "myfiles = !ls ~"
1722 1717
1723 1718 myfiles.s, myfiles.l and myfiles.n still apply as documented
1724 1719 below.
1725 1720
1726 1721 --
1727 1722 %sc [options] varname=command
1728 1723
1729 1724 IPython will run the given command using commands.getoutput(), and
1730 1725 will then update the user's interactive namespace with a variable
1731 1726 called varname, containing the value of the call. Your command can
1732 1727 contain shell wildcards, pipes, etc.
1733 1728
1734 1729 The '=' sign in the syntax is mandatory, and the variable name you
1735 1730 supply must follow Python's standard conventions for valid names.
1736 1731
1737 1732 (A special format without variable name exists for internal use)
1738 1733
1739 1734 Options:
1740 1735
1741 1736 -l: list output. Split the output on newlines into a list before
1742 1737 assigning it to the given variable. By default the output is stored
1743 1738 as a single string.
1744 1739
1745 1740 -v: verbose. Print the contents of the variable.
1746 1741
1747 1742 In most cases you should not need to split as a list, because the
1748 1743 returned value is a special type of string which can automatically
1749 1744 provide its contents either as a list (split on newlines) or as a
1750 1745 space-separated string. These are convenient, respectively, either
1751 1746 for sequential processing or to be passed to a shell command.
1752 1747
1753 1748 For example:
1754 1749
1755 1750 # Capture into variable a
1756 1751 In [9]: sc a=ls *py
1757 1752
1758 1753 # a is a string with embedded newlines
1759 1754 In [10]: a
1760 1755 Out[10]: 'setup.py win32_manual_post_install.py'
1761 1756
1762 1757 # which can be seen as a list:
1763 1758 In [11]: a.l
1764 1759 Out[11]: ['setup.py', 'win32_manual_post_install.py']
1765 1760
1766 1761 # or as a whitespace-separated string:
1767 1762 In [12]: a.s
1768 1763 Out[12]: 'setup.py win32_manual_post_install.py'
1769 1764
1770 1765 # a.s is useful to pass as a single command line:
1771 1766 In [13]: !wc -l $a.s
1772 1767 146 setup.py
1773 1768 130 win32_manual_post_install.py
1774 1769 276 total
1775 1770
1776 1771 # while the list form is useful to loop over:
1777 1772 In [14]: for f in a.l:
1778 1773 ....: !wc -l $f
1779 1774 ....:
1780 1775 146 setup.py
1781 1776 130 win32_manual_post_install.py
1782 1777
1783 1778 Similarly, the lists returned by the -l option are also special, in
1784 1779 the sense that you can equally invoke the .s attribute on them to
1785 1780 automatically get a whitespace-separated string from their contents:
1786 1781
1787 1782 In [1]: sc -l b=ls *py
1788 1783
1789 1784 In [2]: b
1790 1785 Out[2]: ['setup.py', 'win32_manual_post_install.py']
1791 1786
1792 1787 In [3]: b.s
1793 1788 Out[3]: 'setup.py win32_manual_post_install.py'
1794 1789
1795 1790 In summary, both the lists and strings used for output capture have
1796 1791 the following special attributes:
1797 1792
1798 1793 .l (or .list) : value as list.
1799 1794 .n (or .nlstr): value as newline-separated string.
1800 1795 .s (or .spstr): value as space-separated string.
1801 1796
1802 1797 **%store**::
1803 1798
1804 1799 Lightweight persistence for python variables.
1805 1800
1806 1801 Example:
1807 1802
1808 1803 ville@badger[~]|1> A = ['hello',10,'world']\
1809 1804 ville@badger[~]|2> %store A\
1810 1805 ville@badger[~]|3> Exit
1811 1806
1812 1807 (IPython session is closed and started again...)
1813 1808
1814 1809 ville@badger:~$ ipython -p pysh\
1815 1810 ville@badger[~]|1> print A
1816 1811
1817 1812 ['hello', 10, 'world']
1818 1813
1819 1814 Usage:
1820 1815
1821 1816 %store - Show list of all variables and their current values\
1822 1817 %store <var> - Store the *current* value of the variable to disk\
1823 1818 %store -d <var> - Remove the variable and its value from storage\
1824 1819 %store -z - Remove all variables from storage\
1825 1820 %store -r - Refresh all variables from store (delete current vals)\
1826 1821 %store foo >a.txt - Store value of foo to new file a.txt\
1827 1822 %store foo >>a.txt - Append value of foo to file a.txt\
1828 1823
1829 1824 It should be noted that if you change the value of a variable, you
1830 1825 need to %store it again if you want to persist the new value.
1831 1826
1832 1827 Note also that the variables will need to be pickleable; most basic
1833 1828 python types can be safely %stored.
1834 1829
1835 1830 Aliases can also be %store'd across sessions.
1836 1831
1837 1832 **%sx**::
1838 1833
1839 1834 Shell execute - run a shell command and capture its output.
1840 1835
1841 1836 %sx command
1842 1837
1843 1838 IPython will run the given command using commands.getoutput(), and
1844 1839 return the result formatted as a list (split on '\n'). Since the
1845 1840 output is _returned_, it will be stored in ipython's regular output
1846 1841 cache Out[N] and in the '_N' automatic variables.
1847 1842
1848 1843 Notes:
1849 1844
1850 1845 1) If an input line begins with '!!', then %sx is automatically
1851 1846 invoked. That is, while:
1852 1847 !ls
1853 1848 causes ipython to simply issue system('ls'), typing
1854 1849 !!ls
1855 1850 is a shorthand equivalent to:
1856 1851 %sx ls
1857 1852
1858 1853 2) %sx differs from %sc in that %sx automatically splits into a list,
1859 1854 like '%sc -l'. The reason for this is to make it as easy as possible
1860 1855 to process line-oriented shell output via further python commands.
1861 1856 %sc is meant to provide much finer control, but requires more
1862 1857 typing.
1863 1858
1864 1859 3) Just like %sc -l, this is a list with special attributes:
1865 1860
1866 1861 .l (or .list) : value as list.
1867 1862 .n (or .nlstr): value as newline-separated string.
1868 1863 .s (or .spstr): value as whitespace-separated string.
1869 1864
1870 1865 This is very useful when trying to use such lists as arguments to
1871 1866 system commands.
1872 1867
1873 1868 **%system_verbose**::
1874 1869
1875 1870 Set verbose printing of system calls.
1876 1871
1877 1872 If called without an argument, acts as a toggle
1878 1873
1879 1874 **%time**::
1880 1875
1881 1876 Time execution of a Python statement or expression.
1882 1877
1883 1878 The CPU and wall clock times are printed, and the value of the
1884 1879 expression (if any) is returned. Note that under Win32, system time
1885 1880 is always reported as 0, since it can not be measured.
1886 1881
1887 1882 This function provides very basic timing functionality. In Python
1888 1883 2.3, the timeit module offers more control and sophistication, so this
1889 1884 could be rewritten to use it (patches welcome).
1890 1885
1891 1886 Some examples:
1892 1887
1893 1888 In [1]: time 2**128
1894 1889 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1895 1890 Wall time: 0.00
1896 1891 Out[1]: 340282366920938463463374607431768211456L
1897 1892
1898 1893 In [2]: n = 1000000
1899 1894
1900 1895 In [3]: time sum(range(n))
1901 1896 CPU times: user 1.20 s, sys: 0.05 s, total: 1.25 s
1902 1897 Wall time: 1.37
1903 1898 Out[3]: 499999500000L
1904 1899
1905 1900 In [4]: time print 'hello world'
1906 1901 hello world
1907 1902 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1908 1903 Wall time: 0.00
1909 1904
1910 1905 Note that the time needed by Python to compile the given expression
1911 1906 will be reported if it is more than 0.1s. In this example, the
1912 1907 actual exponentiation is done by Python at compilation time, so while
1913 1908 the expression can take a noticeable amount of time to compute, that
1914 1909 time is purely due to the compilation:
1915 1910
1916 1911 In [5]: time 3**9999;
1917 1912 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1918 1913 Wall time: 0.00 s
1919 1914
1920 1915 In [6]: time 3**999999;
1921 1916 CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s
1922 1917 Wall time: 0.00 s
1923 1918 Compiler : 0.78 s
1924 1919
1925 1920 **%timeit**::
1926 1921
1927 1922 Time execution of a Python statement or expression
1928 1923
1929 1924 Usage:\
1930 1925 %timeit [-n<N> -r<R> [-t|-c]] statement
1931 1926
1932 1927 Time execution of a Python statement or expression using the timeit
1933 1928 module.
1934 1929
1935 1930 Options:
1936 1931 -n<N>: execute the given statement <N> times in a loop. If this value
1937 1932 is not given, a fitting value is chosen.
1938 1933
1939 1934 -r<R>: repeat the loop iteration <R> times and take the best result.
1940 1935 Default: 3
1941 1936
1942 1937 -t: use time.time to measure the time, which is the default on Unix.
1943 1938 This function measures wall time.
1944 1939
1945 1940 -c: use time.clock to measure the time, which is the default on
1946 1941 Windows and measures wall time. On Unix, resource.getrusage is used
1947 1942 instead and returns the CPU user time.
1948 1943
1949 1944 -p<P>: use a precision of <P> digits to display the timing result.
1950 1945 Default: 3
1951 1946
1952 1947
1953 1948 Examples:\
1954 1949 In [1]: %timeit pass
1955 1950 10000000 loops, best of 3: 53.3 ns per loop
1956 1951
1957 1952 In [2]: u = None
1958 1953
1959 1954 In [3]: %timeit u is None
1960 1955 10000000 loops, best of 3: 184 ns per loop
1961 1956
1962 1957 In [4]: %timeit -r 4 u == None
1963 1958 1000000 loops, best of 4: 242 ns per loop
1964 1959
1965 1960 In [5]: import time
1966 1961
1967 1962 In [6]: %timeit -n1 time.sleep(2)
1968 1963 1 loops, best of 3: 2 s per loop
1969 1964
1970 1965
1971 1966 The times reported by %timeit will be slightly higher than those
1972 1967 reported by the timeit.py script when variables are accessed. This is
1973 1968 due to the fact that %timeit executes the statement in the namespace
1974 1969 of the shell, compared with timeit.py, which uses a single setup
1975 1970 statement to import function or create variables. Generally, the bias
1976 1971 does not matter as long as results from timeit.py are not mixed with
1977 1972 those from %timeit.
1978 1973
1979 1974 **%unalias**::
1980 1975
1981 1976 Remove an alias
1982 1977
1983 1978 **%upgrade**::
1984 1979
1985 1980 Upgrade your IPython installation
1986 1981
1987 1982 This will copy the config files that don't yet exist in your
1988 1983 ipython dir from the system config dir. Use this after upgrading
1989 1984 IPython if you don't wish to delete your .ipython dir.
1990 1985
1991 1986 Call with -nolegacy to get rid of ipythonrc* files (recommended for
1992 1987 new users)
1993 1988
1994 1989 **%which**::
1995 1990
1996 1991 %which <cmd> => search PATH for files matching cmd. Also scans aliases.
1997 1992
1998 1993 Traverses PATH and prints all files (not just executables!) that match the
1999 1994 pattern on command line. Probably more useful in finding stuff
2000 1995 interactively than 'which', which only prints the first matching item.
2001 1996
2002 1997 Also discovers and expands aliases, so you'll see what will be executed
2003 1998 when you call an alias.
2004 1999
2005 2000 Example:
2006 2001
2007 2002 [~]|62> %which d
2008 2003 d -> ls -F --color=auto
2009 2004 == c:\cygwin\bin\ls.exe
2010 2005 c:\cygwin\bin\d.exe
2011 2006
2012 2007 [~]|64> %which diff*
2013 2008 diff3 -> diff3
2014 2009 == c:\cygwin\bin\diff3.exe
2015 2010 diff -> diff
2016 2011 == c:\cygwin\bin\diff.exe
2017 2012 c:\cygwin\bin\diff.exe
2018 2013 c:\cygwin\bin\diff3.exe
2019 2014
2020 2015 **%who**::
2021 2016
2022 2017 Print all interactive variables, with some minimal formatting.
2023 2018
2024 2019 If any arguments are given, only variables whose type matches one of
2025 2020 these are printed. For example:
2026 2021
2027 2022 %who function str
2028 2023
2029 2024 will only list functions and strings, excluding all other types of
2030 2025 variables. To find the proper type names, simply use type(var) at a
2031 2026 command line to see how python prints type names. For example:
2032 2027
2033 2028 In [1]: type('hello')\
2034 2029 Out[1]: <type 'str'>
2035 2030
2036 2031 indicates that the type name for strings is 'str'.
2037 2032
2038 2033 %who always excludes executed names loaded through your configuration
2039 2034 file and things which are internal to IPython.
2040 2035
2041 2036 This is deliberate, as typically you may load many modules and the
2042 2037 purpose of %who is to show you only what you've manually defined.
2043 2038
2044 2039 **%who_ls**::
2045 2040
2046 2041 Return a sorted list of all interactive variables.
2047 2042
2048 2043 If arguments are given, only variables of types matching these
2049 2044 arguments are returned.
2050 2045
2051 2046 **%whos**::
2052 2047
2053 2048 Like %who, but gives some extra information about each variable.
2054 2049
2055 2050 The same type filtering of %who can be applied here.
2056 2051
2057 2052 For all variables, the type is printed. Additionally it prints:
2058 2053
2059 2054 - For {},[],(): their length.
2060 2055
2061 2056 - For numpy and Numeric arrays, a summary with shape, number of
2062 2057 elements, typecode and size in memory.
2063 2058
2064 2059 - Everything else: a string representation, snipping their middle if
2065 2060 too long.
2066 2061
2067 2062 **%xmode**::
2068 2063
2069 2064 Switch modes for the exception handlers.
2070 2065
2071 2066 Valid modes: Plain, Context and Verbose.
2072 2067
2073 2068 If called without arguments, acts as a toggle.
2074 2069
2075 2070 .. magic_end
2076 2071
2077 2072 Access to the standard Python help
2078 2073 ----------------------------------
2079 2074
2080 As of Python 2.1, a help system is available with access to object
2081 docstrings and the Python manuals. Simply type 'help' (no quotes) to
2082 access it. You can also type help(object) to obtain information about a
2083 given object, and help('keyword') for information on a keyword. As noted
2084 in sec. `accessing help`_, you need to properly configure
2085 your environment variable PYTHONDOCS for this feature to work correctly.
2075 As of Python 2.1, a help system is available with access to object docstrings
2076 and the Python manuals. Simply type 'help' (no quotes) to access it. You can
2077 also type help(object) to obtain information about a given object, and
2078 help('keyword') for information on a keyword. As noted :ref:`here
2079 <accessing_help>`, you need to properly configure your environment variable
2080 PYTHONDOCS for this feature to work correctly.
2086 2081
2082 .. _dynamic_object_info:
2087 2083
2088 2084 Dynamic object information
2089 2085 --------------------------
2090 2086
2091 2087 Typing ?word or word? prints detailed information about an object. If
2092 2088 certain strings in the object are too long (docstrings, code, etc.) they
2093 2089 get snipped in the center for brevity. This system gives access to variable
2094 2090 types and values, full source code for any object (if available),
2095 2091 function prototypes and other useful information.
2096 2092
2097 2093 Typing ??word or word?? gives access to the full information without
2098 2094 snipping long strings. Long strings are sent to the screen through the
2099 2095 less pager if longer than the screen and printed otherwise. On systems
2100 2096 lacking the less command, IPython uses a very basic internal pager.
2101 2097
2102 2098 The following magic functions are particularly useful for gathering
2103 2099 information about your working environment. You can get more details by
2104 2100 typing %magic or querying them individually (use %function_name? with or
2105 2101 without the %); this is just a summary:
2106 2102
2107 2103 * **%pdoc <object>**: Print (or run through a pager if too long) the
2108 2104 docstring for an object. If the given object is a class, it will
2109 2105 print both the class and the constructor docstrings.
2110 2106 * **%pdef <object>**: Print the definition header for any callable
2111 2107 object. If the object is a class, print the constructor information.
2112 2108 * **%psource <object>**: Print (or run through a pager if too long)
2113 2109 the source code for an object.
2114 2110 * **%pfile <object>**: Show the entire source file where an object was
2115 2111 defined via a pager, opening it at the line where the object
2116 2112 definition begins.
2117 2113 * **%who/%whos**: These functions give information about identifiers
2118 2114 you have defined interactively (not things you loaded or defined
2119 2115 in your configuration files). %who just prints a list of
2120 2116 identifiers and %whos prints a table with some basic details about
2121 2117 each identifier.
2122 2118
2123 2119 Note that the dynamic object information functions (?/??, %pdoc, %pfile,
2124 2120 %pdef, %psource) give you access to documentation even on things which
2125 2121 are not really defined as separate identifiers. Try for example typing
2126 2122 {}.get? or after doing import os, type os.path.abspath??.
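
For instance, a minimal session illustrating these operators (output
omitted for brevity)::

    In [1]: import os

    In [2]: os.path.abspath?     # docstring, signature and file location

    In [3]: os.path.abspath??    # same information plus the full source

    In [4]: %pdef os.path.abspath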
2127 2123
2128 2124
2129 .. _Readline:
2125 .. _readline:
2130 2126
2131 2127 Readline-based features
2132 2128 -----------------------
2133 2129
2134 2130 These features require the GNU readline library, so they won't work if
2135 2131 your Python installation lacks readline support. We will first describe
2136 2132 the default behavior IPython uses, and then how to change it to suit
2137 2133 your preferences.
2138 2134
2139 2135
2140 2136 Command line completion
2141 2137 +++++++++++++++++++++++
2142 2138
2143 2139 At any time, hitting TAB will complete any available python commands or
2144 2140 variable names, and show you a list of the possible completions if
2145 2141 there's no unambiguous one. It will also complete filenames in the
2146 2142 current directory if no python names match what you've typed so far.
2147 2143
2148 2144
2149 2145 Search command history
2150 2146 ++++++++++++++++++++++
2151 2147
2152 2148 IPython provides two ways to search through previous input and thus
2153 2149 reduce the need for repetitive typing:
2154 2150
2155 2151 1. Start typing, and then use Ctrl-p (previous,up) and Ctrl-n
2156 2152 (next,down) to search through only the history items that match
2157 2153 what you've typed so far. If you use Ctrl-p/Ctrl-n at a blank
2158 2154 prompt, they just behave like normal arrow keys.
2159 2155 2. Hit Ctrl-r: opens a search prompt. Begin typing and the system
2160 2156 searches your history for lines that contain what you've typed so
2161 2157 far, completing as much as it can.
2162 2158
2163 2159
2164 2160 Persistent command history across sessions
2165 2161 ++++++++++++++++++++++++++++++++++++++++++
2166 2162
2167 2163 IPython will save your input history when it leaves and reload it next
2168 2164 time you restart it. By default, the history file is named
2169 2165 $IPYTHONDIR/history, but if you've loaded a named profile,
2170 2166 '-PROFILE_NAME' is appended to the name. This allows you to keep
2171 2167 separate histories related to various tasks: commands related to
2172 2168 numerical work will not be clobbered by a system shell history, for
2173 2169 example.
2174 2170
2175 2171
2176 2172 Autoindent
2177 2173 ++++++++++
2178 2174
2179 2175 IPython can recognize lines ending in ':' and indent the next line,
2180 2176 while also un-indenting automatically after 'raise' or 'return'.
2181 2177
2182 2178 This feature uses the readline library, so it will honor your ~/.inputrc
2183 2179 configuration (or whatever file your INPUTRC variable points to). Adding
2184 2180 the following lines to your .inputrc file can make indenting/unindenting
2185 2181 more convenient (M-i indents, M-u unindents)::
2186 2182
2187 2183 $if Python
2188 2184 "\M-i": " "
2189 2185 "\M-u": "\d\d\d\d"
2190 2186 $endif
2191 2187
2192 2188 Note that there are 4 spaces between the quote marks after "M-i" above.
2193 2189
2194 2190 Warning: this feature is ON by default, but it can cause problems with
2195 2191 the pasting of multi-line indented code (the pasted code gets
2196 2192 re-indented on each line). A magic function %autoindent allows you to
2197 2193 toggle it on/off at runtime. You can also disable it permanently in
2198 2194 your ipythonrc file (set autoindent 0).
2199 2195
2200 2196
2201 2197 Customizing readline behavior
2202 2198 +++++++++++++++++++++++++++++
2203 2199
2204 2200 All these features are based on the GNU readline library, which has an
2205 2201 extremely customizable interface. Normally, readline is configured via a
2206 2202 file which defines the behavior of the library; the details of the
2207 2203 syntax for this can be found in the readline documentation available
2208 2204 with your system or on the Internet. IPython doesn't read this file (if
2209 2205 it exists) directly, but it does support passing to readline valid
2210 2206 options via a simple interface. In brief, you can customize readline by
2211 2207 setting the following options in your ipythonrc configuration file (note
2212 2208 that these options can not be specified at the command line):
2213 2209
2214 2210 * **readline_parse_and_bind**: this option can appear as many times as
2215 2211 you want, each time defining a string to be executed via a
2216 2212 readline.parse_and_bind() command. The syntax for valid commands
2217 2213 of this kind can be found by reading the documentation for the GNU
2218 2214 readline library, as these commands are of the kind which readline
2219 2215 accepts in its configuration file.
2220 2216 * **readline_remove_delims**: a string of characters to be removed
2221 2217 from the default word-delimiters list used by readline, so that
2222 2218 completions may be performed on strings which contain them. Do not
2223 2219 change the default value unless you know what you're doing.
2224 2220 * **readline_omit__names**: when tab-completion is enabled, hitting
2225 2221 <tab> after a '.' in a name will complete all attributes of an
2226 2222 object, including all the special methods whose names include
2227 2223 double underscores (like __getitem__ or __class__). If you'd
2228 2224 rather not see these names by default, you can set this option to
2229 2225 1. Note that even when this option is set, you can still see those
2230 2226 names by explicitly typing a _ after the period and hitting <tab>:
2231 2227 'name._<tab>' will always complete attribute names starting with '_'.
2232 2228
2233 2229 This option is off by default so that new users see all
2234 2230 attributes of any objects they are dealing with.
2235 2231
2236 2232 You will find the default values along with a corresponding detailed
2237 2233 explanation in your ipythonrc file.
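
As an illustration, a hypothetical ipythonrc fragment using these
options could look like the following (the bindings and values shown
are examples only, not recommendations)::

    readline_parse_and_bind tab: complete
    readline_parse_and_bind "\C-l": clear-screen
    readline_remove_delims -/~
    readline_omit__names 0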
2238 2234
2239 2235
2240 2236 Session logging and restoring
2241 2237 -----------------------------
2242 2238
2243 You can log all input from a session either by starting IPython with
2244 the command line switches -log or -logfile (see sec. `command line
2245 options`_) or by activating the logging at any moment with the magic
2246 function %logstart.
2239 You can log all input from a session either by starting IPython with the
2240 command line switches -log or -logfile (see :ref:`here <command_line_options>`)
2241 or by activating the logging at any moment with the magic function %logstart.
2247 2242
2248 2243 Log files can later be reloaded with the -logplay option and IPython
2249 2244 will attempt to 'replay' the log by executing all the lines in it, thus
2250 2245 restoring the state of a previous session. This feature is not quite
2251 2246 perfect, but can still be useful in many cases.
2252 2247
2253 2248 The log files can also be used as a way to have a permanent record of
2254 2249 any code you wrote while experimenting. Log files are regular text files
2255 2250 which you can later open in your favorite text editor to extract code or
2256 2251 to 'clean them up' before using them to replay a session.
2257 2252
2258 2253 The %logstart function for activating logging in mid-session is used as
2259 2254 follows:
2260 2255
2261 2256 %logstart [log_name [log_mode]]
2262 2257
2263 2258 If no name is given, it defaults to a file named 'log' in your
2264 2259 IPYTHONDIR directory, in 'rotate' mode (see below).
2265 2260
2266 2261 '%logstart name' saves to file 'name' in 'backup' mode. It saves your
2267 2262 history up to that point and then continues logging.
2268 2263
2269 2264 %logstart takes a second optional parameter: logging mode. This can be
2270 2265 one of (note that the modes are given unquoted):
2271 2266
2272 2267 * [over:] overwrite existing log_name.
2273 2268 * [backup:] rename (if exists) to log_name~ and start log_name.
2274 2269 * [append:] well, that says it.
2275 2270 * [rotate:] create rotating logs log_name.1~, log_name.2~, etc.
2276 2271
2277 2272 The %logoff and %logon functions allow you to temporarily stop and
2278 2273 resume logging to a file which had previously been started with
2279 2274 %logstart. They will fail (with an explanation) if you try to use them
2280 2275 before logging has been started.
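
A typical mid-session logging workflow might look like this sketch
(the file name and mode are arbitrary)::

    In [1]: %logstart mysession.py append   # start (or resume) logging

    In [10]: %logoff                        # temporarily stop logging

    In [12]: %logon                         # resume logging to the same file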
2281 2276
2277 .. _system_shell_access:
2278
2282 2279 System shell access
2283 2280 -------------------
2284 2281
2285 2282 Any input line beginning with a ! character is passed verbatim (minus
2286 2283 the !, of course) to the underlying operating system. For example,
2287 2284 typing !ls will run 'ls' in the current directory.
2288 2285
2289 2286 Manual capture of command output
2290 2287 --------------------------------
2291 2288
2292 2289 If the input line begins with two exclamation marks, !!, the command is
2293 2290 executed but its output is captured and returned as a python list, split
2294 2291 on newlines. Any output sent by the subprocess to standard error is
2295 2292 printed separately, so that the resulting list only captures standard
2296 2293 output. The !! syntax is a shorthand for the %sx magic command.
2297 2294
2298 2295 Finally, the %sc magic (short for 'shell capture') is similar to %sx,
2299 2296 but allows more fine-grained control of the capture details and
2300 2297 stores the result directly into a named variable. The direct use of
2301 2298 %sc is now deprecated, and you should use the ``var = !cmd`` syntax
2302 2299 instead.
2303 2300
2304 2301 IPython also allows you to expand the value of python variables when
2305 2302 making system calls. Any python variable or expression which you prepend
2306 2303 with $ will get expanded before the system call is made::
2307 2304
2308 2305 In [1]: pyvar='Hello world'
2309 2306 In [2]: !echo "A python variable: $pyvar"
2310 2307 A python variable: Hello world
2311 2308
2312 2309 If you want the shell to actually see a literal $, you need to type it
2313 2310 twice::
2314 2311
2315 2312 In [3]: !echo "A system variable: $$HOME"
2316 2313 A system variable: /home/fperez
2317 2314
2318 2315 You can pass arbitrary expressions, though you'll need to delimit them
2319 2316 with {} if there is ambiguity as to the extent of the expression::
2320 2317
2321 2318 In [5]: x=10
2322 2319 In [6]: y=20
2323 2320 In [13]: !echo $x+y
2324 2321 10+y
2325 2322 In [7]: !echo ${x+y}
2326 2323 30
2327 2324
2328 2325 Even object attributes can be expanded::
2329 2326
2330 2327 In [12]: !echo $sys.argv
2331 2328 [/home/fperez/usr/bin/ipython]
2332 2329
2333 2330
2334 2331 System command aliases
2335 2332 ----------------------
2336 2333
2337 2334 The %alias magic function and the alias option in the ipythonrc
2338 2335 configuration file allow you to define magic functions which are in fact
2339 2336 system shell commands. These aliases can have parameters.
2340 2337
2341 2338 '%alias alias_name cmd' defines 'alias_name' as an alias for 'cmd'
2342 2339
2343 2340 Then, typing '%alias_name params' will execute the system command 'cmd
2344 2341 params' (from your underlying operating system).
2345 2342
2346 2343 You can also define aliases with parameters using %s specifiers (one per
2347 2344 parameter). The following example defines the %parts function as an
2348 2345 alias to the command 'echo first %s second %s' where each %s will be
2349 2346 replaced by a positional parameter to the call to %parts::
2350 2347
2351 2348 In [1]: alias parts echo first %s second %s
2352 2349 In [2]: %parts A B
2353 2350 first A second B
2354 2351 In [3]: %parts A
2355 2352 Incorrect number of arguments: 2 expected.
2356 2353 parts is an alias to: 'echo first %s second %s'
2357 2354
2358 2355 If called with no parameters, %alias prints the table of currently
2359 2356 defined aliases.
2360 2357
2361 2358 The %rehash/rehashx magics allow you to load your entire $PATH as
2362 2359 ipython aliases. See their respective docstrings (or sec. 6.2
2363 2360 <#sec:magic>) for further details.
2364 2361
2365 2362
2366 2363 .. _dreload:
2367 2364
2368 2365 Recursive reload
2369 2366 ----------------
2370 2367
2371 2368 The dreload function does a recursive reload of a module: changes made
2372 2369 to the module since you imported it will actually be available without
2373 2370 having to exit.
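
A minimal usage sketch (mypackage is a placeholder for a module you
are actively editing)::

    In [1]: import mypackage

    In [2]: # ... edit mypackage and its submodules in your editor ...

    In [3]: dreload(mypackage)   # recursively reload mypackage and its dependencies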
2374 2371
2375 2372
2376 2373 Verbose and colored exception traceback printouts
2377 2374 -------------------------------------------------
2378 2375
2379 2376 IPython provides the option to see very detailed exception tracebacks,
2380 2377 which can be especially useful when debugging large programs. You can
2381 2378 run any Python file with the %run function to benefit from these
2382 2379 detailed tracebacks. Furthermore, both normal and verbose tracebacks can
2383 2380 be colored (if your terminal supports it) which makes them much easier
2384 2381 to parse visually.
2385 2382
2386 2383 See the magic xmode and colors functions for details (just type %magic).
2387 2384
2388 2385 These features are basically a terminal version of Ka-Ping Yee's cgitb
2389 2386 module, now part of the standard Python library.
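
For example, the following sketch switches to the most detailed mode
and a color scheme before re-running a script (buggy_script.py is a
placeholder for your own file)::

    In [1]: %xmode Verbose      # show the values of local variables in each frame

    In [2]: %colors Linux       # pick one of the available color schemes

    In [3]: %run buggy_script.py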
2390 2387
2391 2388
2392 .. _Input caching:
2389 .. _input_caching:
2393 2390
2394 2391 Input caching system
2395 2392 --------------------
2396 2393
2397 IPython offers numbered prompts (In/Out) with input and output caching.
2398 All input is saved and can be retrieved as variables (besides the usual
2399 arrow key recall).
2394 IPython offers numbered prompts (In/Out) with input and output caching
2395 (also referred to as 'input history'). All input is saved and can be
2396 retrieved as variables (besides the usual arrow key recall), in
2397 addition to the %rep magic command that brings a history entry
2398 up for editing on the next command line.
2400 2399
2401 2400 The following GLOBAL variables always exist (so don't overwrite them!):
2402 2401 _i: stores previous input. _ii: next previous. _iii: next-next previous.
2403 2402 _ih : a list of all input; _ih[n] is the input from line n, and this list
2404 2403 is aliased to the global variable In. If you overwrite In with a
2405 2404 variable of your own, you can remake the assignment to the internal list
2406 2405 with a simple 'In=_ih'.
2407 2406
2408 2407 Additionally, global variables named _i<n> are dynamically created (<n>
2409 2408 being the prompt counter), such that
2410 2409 _i<n> == _ih[<n>] == In[<n>].
2411 2410
2412 2411 For example, what you typed at prompt 14 is available as _i14, _ih[14]
2413 2412 and In[14].
2414 2413
2415 2414 This allows you to easily cut and paste multi line interactive prompts
2416 2415 by printing them out: they print like a clean string, without prompt
2417 2416 characters. You can also manipulate them like regular variables (they
2418 2417 are strings), modify or exec them (typing 'exec _i9' will re-execute the
2419 2418 contents of input prompt 9, 'exec In[9:14]+In[18]' will re-execute lines
2420 2419 9 through 13 and line 18).
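
For instance, a minimal sketch of working with the input cache (line
numbers are arbitrary)::

    In [1]: x = 10

    In [2]: y = x + 5

    In [3]: print _i1       # the text of input line 1, i.e. 'x = 10'

    In [4]: exec In[1:3]    # re-execute lines 1 and 2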
2421 2420
2422 2421 You can also re-execute multiple lines of input easily by using the
2423 2422 magic %macro function (which automates the process and allows
2424 2423 re-execution without having to type 'exec' every time). The macro system
2425 2424 also allows you to re-execute previous lines which include magic
2426 2425 function calls (which require special processing). Type %macro? or see
2427 2426 sec. 6.2 <#sec:magic> for more details on the macro system.
2428 2427
2429 2428 A history function %hist allows you to see any part of your input
2430 2429 history by printing a range of the _i variables.
2431 2430
2432 .. _Output caching:
2431 You can also search ('grep') through your history by typing
2432 '%hist -g somestring'. This also searches through the so-called *shadow history*,
2433 which remembers all the commands (apart from multiline code blocks)
2434 you have ever entered. Handy for searching for svn/bzr URLs, IP addresses,
2435 etc. You can bring shadow history entries listed by '%hist -g' up for editing
2436 (or re-execution by just pressing ENTER) with the %rep command. Shadow history
2437 entries are not available as _iNUMBER variables, and they are identified by
2438 the '0' prefix in %hist -g output. That is, history entry 12 is a normal
2439 history entry, but 0231 is a shadow history entry.
2440
2441 Shadow history was added because the readline history is inherently very
2442 unsafe - if you have multiple IPython sessions open, the last session
2443 to close will overwrite the history of previously closed sessions. Likewise,
2444 if a crash occurs, history is never saved, whereas shadow history entries
2445 are added after entering every command (so a command executed
2446 in another IPython session is immediately available in other IPython
2447 sessions that are open).
2448
2449 To conserve space, a command can exist in shadow history only once - it doesn't
2450 make sense to store a common line like "cd .." a thousand times. The idea is
2451 mainly to provide a reliable place where valuable, hard-to-remember commands can
2452 always be retrieved, as opposed to providing an exact sequence of commands
2453 you have entered in actual order.
2454
2455 Because shadow history has all the commands you have ever executed,
2456 time taken by %hist -g will increase over time. If it ever starts to take
2457 too long (or it ends up containing sensitive information like passwords),
2458 clear the shadow history with `%clear shadow_nuke`.
2459
2460 Time taken to add entries to shadow history should be negligible, but
2461 in any case, if you start noticing performance degradation after using
2462 IPython for a long time (or running a script that floods the shadow history!),
2463 you can 'compress' the shadow history by executing
2464 `%clear shadow_compress`. In practice, this should never be necessary
2465 in normal use.
2466
2467 .. _output_caching:
2433 2468
2434 2469 Output caching system
2435 2470 ---------------------
2436 2471
2437 2472 For output that is returned from actions, a system similar to the input
2438 2473 cache exists but using _ instead of _i. Only actions that produce a
2439 2474 result (NOT assignments, for example) are cached. If you are familiar
2440 2475 with Mathematica, IPython's _ variables behave exactly like
2441 2476 Mathematica's % variables.
2442 2477
2443 2478 The following GLOBAL variables always exist (so don't overwrite them!):
2444 2479
2445 2480 * [_] (a single underscore) : stores previous output, like Python's
2446 2481 default interpreter.
2447 2482 * [__] (two underscores): next previous.
2448 2483 * [___] (three underscores): next-next previous.
2449 2484
2450 2485 Additionally, global variables named _<n> are dynamically created (<n>
2451 2486 being the prompt counter), such that the result of output <n> is always
2452 2487 available as _<n> (don't use the angle brackets, just the number, e.g.
2453 2488 _21).
2454 2489
2455 2490 These global variables are all stored in a global dictionary (not a
2456 2491 list, since it only has entries for lines which returned a result)
2457 2492 available under the names _oh and Out (similar to _ih and In). So the
2458 2493 output from line 12 can be obtained as _12, Out[12] or _oh[12]. If you
2459 2494 accidentally overwrite the Out variable you can recover it by typing
2460 2495 'Out=_oh' at the prompt.
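
For instance, a short session illustrating the output cache (prompt
numbers are arbitrary)::

    In [1]: 2 + 3
    Out[1]: 5

    In [2]: _ * 10          # _ is the previous result, i.e. 5
    Out[2]: 50

    In [3]: _1 + Out[2]     # _1, Out[1] and _oh[1] all refer to the result of line 1
    Out[3]: 55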
2461 2496
2462 2497 This system obviously can potentially put heavy memory demands on your
2463 2498 system, since it prevents Python's garbage collector from removing any
2464 2499 previously computed results. You can control how many results are kept
2465 2500 in memory with the option (at the command line or in your ipythonrc
2466 2501 file) cache_size. If you set it to 0, the whole system is completely
2467 2502 disabled and the prompts revert to the classic '>>>' of normal Python.
2468 2503
2469 2504
2470 2505 Directory history
2471 2506 -----------------
2472 2507
2473 2508 Your history of visited directories is kept in the global list _dh, and
2474 2509 the magic %cd command can be used to go to any entry in that list. The
2475 %dhist command allows you to view this history. do ``cd -<TAB`` to
2510 %dhist command allows you to view this history. Do ``cd -<TAB`` to
2476 2511 conventiently view the directory history.
2477 2512
2478 2513
2479 2514 Automatic parentheses and quotes
2480 2515 --------------------------------
2481 2516
2482 2517 These features were adapted from Nathan Gray's LazyPython. They are
2483 2518 meant to allow less typing for common situations.
2484 2519
2485 2520
2486 2521 Automatic parentheses
2487 2522 ---------------------
2488 2523
2489 2524 Callable objects (i.e. functions, methods, etc) can be invoked like this
2490 2525 (notice the commas between the arguments)::
2491 2526
2492 2527 >>> callable_ob arg1, arg2, arg3
2493 2528
2494 2529 and the input will be translated to this::
2495 2530
2496 2531 -> callable_ob(arg1, arg2, arg3)
2497 2532
2498 2533 You can force automatic parentheses by using '/' as the first character
2499 2534 of a line. For example::
2500 2535
2501 2536 >>> /globals # becomes 'globals()'
2502 2537
2503 2538 Note that the '/' MUST be the first character on the line! This won't work::
2504 2539
2505 2540 >>> print /globals # syntax error
2506 2541
2507 2542 In most cases the automatic algorithm should work, so you should rarely
2508 2543 need to explicitly invoke /. One notable exception is if you are trying
2509 2544 to call a function with a list of tuples as arguments (the parentheses
2510 2545 will confuse IPython)::
2511 2546
2512 2547 In [1]: zip (1,2,3),(4,5,6) # won't work
2513 2548
2514 2549 but this will work::
2515 2550
2516 2551 In [2]: /zip (1,2,3),(4,5,6)
2517 2552 ---> zip ((1,2,3),(4,5,6))
2518 2553 Out[2]= [(1, 4), (2, 5), (3, 6)]
2519 2554
2520 2555 IPython tells you that it has altered your command line by displaying
2521 2556 the new command line preceded by ->. e.g.::
2522 2557
2523 2558 In [18]: callable list
2524 2559 ----> callable (list)
2525 2560
2526 2561
2527 2562 Automatic quoting
2528 2563 -----------------
2529 2564
2530 2565 You can force automatic quoting of a function's arguments by using ','
2531 2566 or ';' as the first character of a line. For example::
2532 2567
2533 2568 >>> ,my_function /home/me # becomes my_function("/home/me")
2534 2569
2535 2570 If you use ';' instead, the whole argument is quoted as a single string
2536 2571 (while ',' splits on whitespace)::
2537 2572
2538 2573 >>> ,my_function a b c # becomes my_function("a","b","c")
2539 2574
2540 2575 >>> ;my_function a b c # becomes my_function("a b c")
2541 2576
2542 2577 Note that the ',' or ';' MUST be the first character on the line! This
2543 2578 won't work::
2544 2579
2545 2580 >>> x = ,my_function /home/me # syntax error
2546 2581
2547 2582 IPython as your default Python environment
2548 2583 ==========================================
2549 2584
2550 2585 Python honors the environment variable PYTHONSTARTUP and will execute at
2551 2586 startup the file referenced by this variable. If you put at the end of
2552 2587 this file the following two lines of code::
2553 2588
2554 2589 import IPython
2555 2590 IPython.Shell.IPShell().mainloop(sys_exit=1)
2556 2591
2557 2592 then IPython will be your working environment anytime you start Python.
2558 2593 The sys_exit=1 is needed to have IPython issue a call to sys.exit() when
2559 2594 it finishes, otherwise you'll be back at the normal Python '>>>'
2560 2595 prompt.
2561 2596
2562 2597 This is probably useful to developers who manage multiple Python
2563 2598 versions and don't want to have correspondingly multiple IPython
2564 2599 versions. Note that in this mode, there is no way to pass IPython any
2565 2600 command-line options, as those are trapped first by Python itself.
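
For instance, a complete startup file could look like the sketch below. The
try/except guard is only a suggestion (not required by IPython) so that
machines without IPython installed still get the plain prompt::

    # File pointed to by the PYTHONSTARTUP environment variable
    try:
        import IPython
    except ImportError:
        pass    # IPython not installed: fall back to the standard '>>>' prompt
    else:
        IPython.Shell.IPShell().mainloop(sys_exit=1)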
2566 2601
2567 2602 .. _Embedding:
2568 2603
2569 2604 Embedding IPython
2570 2605 =================
2571 2606
2572 2607 It is possible to start an IPython instance inside your own Python
2573 2608 programs. This allows you to evaluate dynamically the state of your
2574 2609 code, operate with your variables, analyze them, etc. Note however that
2575 2610 any changes you make to values while in the shell do not propagate back
2576 2611 to the running code, so it is safe to modify your values because you
2577 2612 won't break your code in bizarre ways by doing so.
2578 2613
2579 2614 This feature allows you to easily have a fully functional python
2580 2615 environment for doing object introspection anywhere in your code with a
2581 2616 simple function call. In some cases a simple print statement is enough,
2582 2617 but if you need to do more detailed analysis of a code fragment this
2583 2618 feature can be very valuable.
2584 2619
2585 2620 It can also be useful in scientific computing situations where it is
2586 2621 common to need to do some automatic, computationally intensive part and
2587 2622 then stop to look at data, plots, etc.
2588 2623 Opening an IPython instance will give you full access to your data and
2589 2624 functions, and you can resume program execution once you are done with
2590 2625 the interactive part (perhaps to stop again later, as many times as
2591 2626 needed).
2592 2627
2593 2628 The following code snippet is the bare minimum you need to include in
2594 2629 your Python programs for this to work (detailed examples follow later)::
2595 2630
2596 2631 from IPython.Shell import IPShellEmbed
2597 2632
2598 2633 ipshell = IPShellEmbed()
2599 2634
2600 2635 ipshell() # this call anywhere in your program will start IPython
2601 2636
2602 2637 You can run embedded instances even in code which is itself being run at
2603 2638 the IPython interactive prompt with '%run <filename>'. Since it's easy
2604 2639 to get lost as to where you are (in your top-level IPython or in your
2605 2640 embedded one), it's a good idea in such cases to set the in/out prompts
2606 2641 to something different for the embedded instances. The code examples
2607 2642 below illustrate this.
2608 2643
2609 2644 You can also have multiple IPython instances in your program and open
2610 2645 them separately, for example with different options for data
2611 2646 presentation. If you close and open the same instance multiple times,
2612 2647 its prompt counters simply continue from each execution to the next.
2613 2648
2614 2649 Please look at the docstrings in the Shell.py module for more details on
2615 2650 the use of this system.
2616 2651
2617 2652 The following sample file illustrating how to use the embedding
2618 2653 functionality is provided in the examples directory as example-embed.py.
2619 2654 It should be fairly self-explanatory::
2620 2655
2621 2656
2622 2657 #!/usr/bin/env python
2623 2658
2624 2659 """An example of how to embed an IPython shell into a running program.
2625 2660
2626 2661 Please see the documentation in the IPython.Shell module for more details.
2627 2662
2628 2663 The accompanying file example-embed-short.py has quick code fragments for
2629 2664 embedding which you can cut and paste in your code once you understand how
2630 2665 things work.
2631 2666
2632 2667 The code in this file is deliberately extra-verbose, meant for learning."""
2633 2668
2634 2669 # The basics to get you going:
2635 2670
2636 2671 # IPython sets the __IPYTHON__ variable so you can know if you have nested
2637 2672 # copies running.
2638 2673
2639 2674 # Try running this code both at the command line and from inside IPython (with
2640 2675 # %run example-embed.py)
2641 2676 try:
2642 2677 __IPYTHON__
2643 2678 except NameError:
2644 2679 nested = 0
2645 2680 args = ['']
2646 2681 else:
2647 2682 print "Running nested copies of IPython."
2648 2683 print "The prompts for the nested copy have been modified"
2649 2684 nested = 1
2650 2685 # what the embedded instance will see as sys.argv:
2651 2686 args = ['-pi1','In <\\#>: ','-pi2',' .\\D.: ',
2652 2687 '-po','Out<\\#>: ','-nosep']
2653 2688
2654 2689 # First import the embeddable shell class
2655 2690 from IPython.Shell import IPShellEmbed
2656 2691
2657 2692 # Now create an instance of the embeddable shell. The first argument is a
2658 2693 # string with options exactly as you would type them if you were starting
2659 2694 # IPython at the system command line. Any parameters you want to define for
2660 2695 # configuration can thus be specified here.
2661 2696 ipshell = IPShellEmbed(args,
2662 2697 banner = 'Dropping into IPython',
2663 2698 exit_msg = 'Leaving Interpreter, back to program.')
2664 2699
2665 2700 # Make a second instance, you can have as many as you want.
2666 2701 if nested:
2667 2702 args[1] = 'In2<\\#>'
2668 2703 else:
2669 2704 args = ['-pi1','In2<\\#>: ','-pi2',' .\\D.: ',
2670 2705 '-po','Out<\\#>: ','-nosep']
2671 2706 ipshell2 = IPShellEmbed(args,banner = 'Second IPython instance.')
2672 2707
2673 2708 print '\nHello. This is printed from the main controller program.\n'
2674 2709
2675 2710 # You can then call ipshell() anywhere you need it (with an optional
2676 2711 # message):
2677 2712 ipshell('***Called from top level. '
2678 2713 'Hit Ctrl-D to exit interpreter and continue program.\n'
2679 2714 'Note that if you use %kill_embedded, you can fully deactivate\n'
2680 2715 'this embedded instance so it will never turn on again')
2681 2716
2682 2717 print '\nBack in caller program, moving along...\n'
2683 2718
2684 2719 #---------------------------------------------------------------------------
2685 2720 # More details:
2686 2721
2687 2722 # IPShellEmbed instances don't print the standard system banner and
2688 2723 # messages. The IPython banner (which actually may contain initialization
2689 2724 # messages) is available as <instance>.IP.BANNER in case you want it.
2690 2725
2691 2726 # IPShellEmbed instances print the following information every time they
2692 2727 # start:
2693 2728
2694 2729 # - A global startup banner.
2695 2730
2696 2731 # - A call-specific header string, which you can use to indicate where in the
2697 2732 # execution flow the shell is starting.
2698 2733
2699 2734 # They also print an exit message every time they exit.
2700 2735
2701 2736 # Both the startup banner and the exit message default to None, and can be set
2702 2737 # either at the instance constructor or at any other time with the
2703 2738 # set_banner() and set_exit_msg() methods.
2704 2739
2705 2740 # The shell instance can be also put in 'dummy' mode globally or on a per-call
2706 2741 # basis. This gives you fine control for debugging without having to change
2707 2742 # code all over the place.
2708 2743
2709 2744 # The code below illustrates all this.
2710 2745
2711 2746
2712 2747 # This is how the global banner and exit_msg can be reset at any point
2713 2748 ipshell.set_banner('Entering interpreter - New Banner')
2714 2749 ipshell.set_exit_msg('Leaving interpreter - New exit_msg')
2715 2750
2716 2751 def foo(m):
2717 2752 s = 'spam'
2718 2753 ipshell('***In foo(). Try @whos, or print s or m:')
2719 2754 print 'foo says m = ',m
2720 2755
2721 2756 def bar(n):
2722 2757 s = 'eggs'
2723 2758 ipshell('***In bar(). Try @whos, or print s or n:')
2724 2759 print 'bar says n = ',n
2725 2760
2726 2761 # Some calls to the above functions which will trigger IPython:
2727 2762 print 'Main program calling foo("eggs")\n'
2728 2763 foo('eggs')
2729 2764
2730 2765 # The shell can be put in 'dummy' mode where calls to it silently return. This
2731 2766 # allows you, for example, to globally turn off debugging for a program with a
2732 2767 # single call.
2733 2768 ipshell.set_dummy_mode(1)
2734 2769 print '\nTrying to call IPython which is now "dummy":'
2735 2770 ipshell()
2736 2771 print 'Nothing happened...'
2737 2772 # The global 'dummy' mode can still be overridden for a single call
2738 2773 print '\nOverriding dummy mode manually:'
2739 2774 ipshell(dummy=0)
2740 2775
2741 2776 # Reactivate the IPython shell
2742 2777 ipshell.set_dummy_mode(0)
2743 2778
2744 2779 print 'You can even have multiple embedded instances:'
2745 2780 ipshell2()
2746 2781
2747 2782 print '\nMain program calling bar("spam")\n'
2748 2783 bar('spam')
2749 2784
2750 2785 print 'Main program finished. Bye!'
2751 2786
2752 2787 #********************** End of file <example-embed.py> ***********************
2753 2788
2754 2789 Once you understand how the system functions, you can use the following
2755 2790 code fragments in your programs which are ready for cut and paste::
2756 2791
2757 2792
2758 2793 """Quick code snippets for embedding IPython into other programs.
2759 2794
2760 2795 See example-embed.py for full details, this file has the bare minimum code for
2761 2796 cut and paste use once you understand how to use the system."""
2762 2797
2763 2798 #---------------------------------------------------------------------------
2764 2799 # This code loads IPython but modifies a few things if it detects it's running
2765 2800 # embedded in another IPython session (helps avoid confusion)
2766 2801
2767 2802 try:
2768 2803 __IPYTHON__
2769 2804 except NameError:
2770 2805 argv = ['']
2771 2806 banner = exit_msg = ''
2772 2807 else:
2773 2808 # Command-line options for IPython (a list like sys.argv)
2774 2809 argv = ['-pi1','In <\\#>:','-pi2',' .\\D.:','-po','Out<\\#>:']
2775 2810 banner = '*** Nested interpreter ***'
2776 2811 exit_msg = '*** Back in main IPython ***'
2777 2812
2778 2813 # First import the embeddable shell class
2779 2814 from IPython.Shell import IPShellEmbed
2780 2815 # Now create the IPython shell instance. Put ipshell() anywhere in your code
2781 2816 # where you want it to open.
2782 2817 ipshell = IPShellEmbed(argv,banner=banner,exit_msg=exit_msg)
2783 2818
2784 2819 #---------------------------------------------------------------------------
2785 2820 # This code will load an embeddable IPython shell always with no changes for
2786 2821 # nested embeddings.
2787 2822
2788 2823 from IPython.Shell import IPShellEmbed
2789 2824 ipshell = IPShellEmbed()
2790 2825 # Now ipshell() will open IPython anywhere in the code.
2791 2826
2792 2827 #---------------------------------------------------------------------------
2793 2828 # This code loads an embeddable shell only if NOT running inside
2794 2829 # IPython. Inside IPython, the embeddable shell variable ipshell is just a
2795 2830 # dummy function.
2796 2831
2797 2832 try:
2798 2833 __IPYTHON__
2799 2834 except NameError:
2800 2835 from IPython.Shell import IPShellEmbed
2801 2836 ipshell = IPShellEmbed()
2802 2837 # Now ipshell() will open IPython anywhere in the code
2803 2838 else:
2804 2839 # Define a dummy ipshell() so the same code doesn't crash inside an
2805 2840 # interactive IPython
2806 2841 def ipshell(): pass
2807 2842
2808 2843 #******************* End of file <example-embed-short.py> ********************
2809 2844
2810 2845 Using the Python debugger (pdb)
2811 2846 ===============================
2812 2847
2813 2848 Running entire programs via pdb
2814 2849 -------------------------------
2815 2850
2816 2851 pdb, the Python debugger, is a powerful interactive debugger which
2817 2852 allows you to step through code, set breakpoints, watch variables,
2818 2853 etc. IPython makes it very easy to start any script under the control
2819 2854 of pdb, regardless of whether you have wrapped it into a 'main()'
2820 2855 function or not. For this, simply type '%run -d myscript' at an
2821 2856 IPython prompt. See the %run command's documentation (via '%run?' or
2822 2857 in Sec. magic_) for more details, including how to control where pdb
2823 2858 will stop execution first.
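
For example (myscript.py is just a placeholder name)::

    In [1]: %run -d myscript.py          # stop at the first line of the script

    In [2]: %run -d -b40 myscript.py     # stop at a breakpoint on line 40 instead

Inside the debugger the usual pdb commands apply: 'c' continues to the
breakpoint and 'q' quits back to IPython. Check '%run?' for the exact flags
supported by your version.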
2824 2859
2825 2860 For more information on the use of the pdb debugger, read the included
2826 2861 pdb.doc file (part of the standard Python distribution). On a stock
2827 2862 Linux system it is located at /usr/lib/python2.3/pdb.doc, but the
2828 2863 easiest way to read it is by using the help() function of the pdb module
2829 2864 as follows (in an IPython prompt):
2830 2865
2831 2866 In [1]: import pdb
2832 2867 In [2]: pdb.help()
2833 2868
2834 2869 This will load the pdb.doc document in a file viewer for you automatically.
2835 2870
2836 2871
2837 2872 Automatic invocation of pdb on exceptions
2838 2873 -----------------------------------------
2839 2874
2840 2875 IPython, if started with the -pdb option (or if the option is set in
2841 2876 your rc file) can call the Python pdb debugger every time your code
2842 2877 triggers an uncaught exception. This feature
2843 2878 can also be toggled at any time with the %pdb magic command. This can be
2844 2879 extremely useful in order to find the origin of subtle bugs, because pdb
2845 2880 opens up at the point in your code which triggered the exception, and
2846 2881 while your program is at this point 'dead', all the data is still
2847 2882 available and you can walk up and down the stack frame and understand
2848 2883 the origin of the problem.
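
A typical interaction looks roughly like this (exact messages may differ
between versions)::

    In [1]: %pdb
    Automatic pdb calling has been turned ON

    In [2]: 1/0

The uncaught ZeroDivisionError then drops you at an ipdb> prompt inside the
offending frame, where the standard pdb commands (p, u, d, w, q) are
available.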
2849 2884
2850 2885 Furthermore, you can use these debugging facilities both with the
2851 2886 embedded IPython mode and without IPython at all. For an embedded shell
2852 2887 (see sec. Embedding_), simply call the constructor with
2853 2888 '-pdb' in the argument string and pdb will automatically be called if an
2854 2889 uncaught exception is triggered by your code.
2855 2890
2856 2891 For stand-alone use of the feature in your programs which do not use
2857 2892 IPython at all, put the following lines toward the top of your 'main'
2858 2893 routine::
2859 2894
2860 2895 import sys,IPython.ultraTB
2861 2896 sys.excepthook = IPython.ultraTB.FormattedTB(mode='Verbose',
2862 2897 color_scheme='Linux', call_pdb=1)
2863 2898
2864 2899 The mode keyword can be either 'Verbose' or 'Plain', giving either very
2865 2900 detailed or normal tracebacks respectively. The color_scheme keyword can
2866 2901 be one of 'NoColor', 'Linux' (default) or 'LightBG'. These are the same
2867 2902 options which can be set in IPython with -colors and -xmode.
2868 2903
2869 2904 This will give any of your programs detailed, colored tracebacks with
2870 2905 automatic invocation of pdb.
2871 2906
2872 2907
2873 2908 Extensions for syntax processing
2874 2909 ================================
2875 2910
2876 2911 This isn't for the faint of heart, because the potential for breaking
2877 2912 things is quite high. But it can be a very powerful and useful feature.
2878 2913 In a nutshell, you can redefine the way IPython processes the user input
2879 2914 line to accept new, special extensions to the syntax without needing to
2880 2915 change any of IPython's own code.
2881 2916
2882 2917 In the IPython/Extensions directory you will find some examples
2883 2918 supplied, which we will briefly describe now. These can be used 'as is'
2884 2919 (and both provide very useful functionality), or you can use them as a
2885 2920 starting point for writing your own extensions.
2886 2921
2887 2922
2888 2923 Pasting of code starting with '>>> ' or '... '
2889 2924 ----------------------------------------------
2890 2925
2891 2926 In the python tutorial it is common to find code examples which have
2892 2927 been taken from real python sessions. The problem with those is that all
2893 2928 the lines begin with either '>>> ' or '... ', which makes it impossible
2894 2929 to paste them all at once. One must instead do a line by line manual
2895 2930 copying, carefully removing the leading extraneous characters.
2896 2931
2897 2932 This extension identifies those starting characters and removes them
2898 2933 from the input automatically, so that one can paste multi-line examples
2899 2934 directly into IPython, saving a lot of time. Please look at the file
2900 2935 InterpreterPasteInput.py in the IPython/Extensions directory for details
2901 2936 on how this is done.
2902 2937
2903 2938 IPython comes with a special profile enabling this feature, called
2904 2939 tutorial. Simply start IPython via 'ipython -p tutorial' and the feature
2905 2940 will be available. In a normal IPython session you can activate the
2906 2941 feature by importing the corresponding module with:
2907 2942 In [1]: import IPython.Extensions.InterpreterPasteInput
2908 2943
2909 2944 The following is a 'screenshot' of how things work when this extension
2910 2945 is on, copying an example from the standard tutorial::
2911 2946
2912 2947 IPython profile: tutorial
2913 2948
2914 2949 *** Pasting of code with ">>>" or "..." has been enabled.
2915 2950
2916 2951 In [1]: >>> def fib2(n): # return Fibonacci series up to n
2917 2952 ...: ... """Return a list containing the Fibonacci series up to
2918 2953 n."""
2919 2954 ...: ... result = []
2920 2955 ...: ... a, b = 0, 1
2921 2956 ...: ... while b < n:
2922 2957 ...: ... result.append(b) # see below
2923 2958 ...: ... a, b = b, a+b
2924 2959 ...: ... return result
2925 2960 ...:
2926 2961
2927 2962 In [2]: fib2(10)
2928 2963 Out[2]: [1, 1, 2, 3, 5, 8]
2929 2964
2930 2965 Note that as currently written, this extension does not recognize
2931 2966 IPython's prompts for pasting. Those are more complicated, since the
2932 2967 user can change them very easily; they involve numbers and can vary in
2933 2968 length. One could however extract all the relevant information from the
2934 2969 IPython instance and build an appropriate regular expression. This is
2935 2970 left as an exercise for the reader.
2936 2971
2937 2972
2938 2973 Input of physical quantities with units
2939 2974 ---------------------------------------
2940 2975
2941 2976 The module PhysicalQInput allows a simplified form of input for physical
2942 2977 quantities with units. This file is meant to be used in conjunction with
2943 2978 the PhysicalQInteractive module (in the same directory) and
2944 2979 Physics.PhysicalQuantities from Konrad Hinsen's ScientificPython
2945 2980 (http://dirac.cnrs-orleans.fr/ScientificPython/).
2946 2981
2947 2982 The Physics.PhysicalQuantities module defines PhysicalQuantity objects,
2948 2983 but these must be declared as instances of a class. For example, to
2949 2984 define v as a velocity of 3 m/s, normally you would write::
2950 2985
2951 2986 In [1]: v = PhysicalQuantity(3,'m/s')
2952 2987
2953 2988 Using the PhysicalQ_Input extension this can be input instead as:
2954 2989 In [1]: v = 3 m/s
2955 2990 which is much more convenient for interactive use (even though it is
2956 2991 blatantly invalid Python syntax).
2957 2992
2958 2993 The physics profile supplied with IPython (enabled via 'ipython -p
2959 2994 physics') uses these extensions, which you can also activate with:
2960 2995
2961 2996 from math import * # math MUST be imported BEFORE PhysicalQInteractive
2962 2997 from IPython.Extensions.PhysicalQInteractive import *
2963 2998 import IPython.Extensions.PhysicalQInput
2964 2999
2965 3000
2966 3001 Threading support
2967 3002 =================
2968 3003
2969 3004 WARNING: The threading support is still somewhat experimental, and it
2970 3005 has only seen reasonable testing under Linux. Threaded code is
2971 3006 particularly tricky to debug, and it tends to show extremely
2972 3007 platform-dependent behavior. Since I only have access to Linux machines,
2973 3008 I will have to rely on users' experiences and assistance for this area
2974 3009 of IPython to improve under other platforms.
2975 3010
2976 3011 IPython, via the -gthread , -qthread, -q4thread and -wthread options
2977 3012 (described in Sec. `Threading options`_), can run in
2978 3013 multithreaded mode to support pyGTK, Qt3, Qt4 and WXPython applications
2979 3014 respectively. These GUI toolkits need to control the python main loop of
2980 3015 execution, so under a normal Python interpreter, starting a pyGTK, Qt3,
2981 3016 Qt4 or WXPython application will immediately freeze the shell.
2982 3017
2983 3018 IPython, with one of these options (you can only use one at a time),
2984 3019 separates the graphical loop and IPython's code execution into
2985 3020 different threads. This allows you to test interactively (with %run, for
2986 3021 example) your GUI code without blocking.
2987 3022
2988 3023 A nice mini-tutorial on using IPython along with the Qt Designer
2989 3024 application is available at the SciPy wiki:
2990 3025 http://www.scipy.org/Cookbook/Matplotlib/Qt_with_IPython_and_Designer.
2991 3026
2992 3027
2993 3028 Tk issues
2994 3029 ---------
2995 3030
2996 3031 As indicated in Sec. `Threading options`_, a special -tk option is
2997 3032 provided to try and allow Tk graphical applications to coexist
2998 3033 interactively with WX, Qt or GTK ones. Whether this works at all,
2999 3034 however, is very platform and configuration dependent. Please
3000 3035 experiment with simple test cases before committing to using this
3001 3036 combination of Tk and GTK/Qt/WX threading in a production environment.
3002 3037
3003 3038
3004 3039 I/O pitfalls
3005 3040 ------------
3006 3041
3007 3042 Be mindful that the Python interpreter switches between threads every
3008 3043 N bytecodes, where the default value as of Python 2.3 is N=100. This
3009 3044 value can be read by using the sys.getcheckinterval() function, and it
3010 3045 can be reset via sys.setcheckinterval(N). This switching of threads can
3011 3046 cause subtly confusing effects if one of your threads is doing file I/O.
3012 3047 In text mode, most systems only flush file buffers when they encounter a
3013 3048 '\n'. An instruction as simple as::
3014 3049
3015 3050 print >> filehandle, 'hello world'
3016 3051
3017 3052 actually consists of several bytecodes, so it is possible that the
3018 3053 newline does not reach your file before the next thread switch.
3019 3054 Similarly, if you are writing to a file in binary mode, the file won't
3020 3055 be flushed until the buffer fills, and your other thread may see
3021 3056 apparently truncated files.
3022 3057
3023 3058 For this reason, if you are using IPython's thread support and have (for
3024 3059 example) a GUI application which will read data from files written to by
3025 3060 the IPython thread, the safest approach is to open all
3026 3061 of your files in unbuffered mode (the third argument to the file/open
3027 3062 function is the buffering value)::
3028 3063
3029 3064 filehandle = open(filename,mode,0)
3030 3065
3031 3066 This is obviously a brute force way of avoiding race conditions with the
3032 3067 file buffering. If you want to do it cleanly, and you have a resource
3033 3068 which is being shared by the interactive IPython loop and your GUI
3034 3069 thread, you should really handle it with thread locking and
3035 3070 synchronization primitives. The Python documentation discusses these.
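
As a minimal sketch (the file name and function are made up for illustration),
the shared file can be wrapped with a standard threading.Lock so that a whole
record is always written atomically::

    import threading

    log_lock = threading.Lock()
    logfile = open('shared.log', 'w', 0)     # unbuffered, as discussed above

    def write_record(text):
        # Only one thread at a time may write, so the GUI thread never sees
        # a half-written line from the IPython thread.
        log_lock.acquire()
        try:
            logfile.write(text + '\n')
        finally:
            log_lock.release()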
3036 3071
3037 .. _Interactive demos:
3072 .. _interactive_demos:
3038 3073
3039 3074 Interactive demos with IPython
3040 3075 ==============================
3041 3076
3042 3077 IPython ships with a basic system for running scripts interactively in
3043 3078 sections, useful when presenting code to audiences. A few tags embedded
3044 3079 in comments (so that the script remains valid Python code) divide a file
3045 3080 into separate blocks, and the demo can be run one block at a time, with
3046 3081 IPython printing (with syntax highlighting) the block before executing
3047 3082 it, and returning to the interactive prompt after each block. The
3048 3083 interactive namespace is updated after each block is run with the
3049 3084 contents of the demo's namespace.
3050 3085
3051 3086 This allows you to show a piece of code, run it and then execute
3052 3087 interactively commands based on the variables just created. Once you
3053 3088 want to continue, you simply execute the next block of the demo. The
3054 3089 following listing shows the markup necessary for dividing a script into
3055 3090 sections for execution as a demo::
3056 3091
3057 3092
3058 3093 """A simple interactive demo to illustrate the use of IPython's Demo class.
3059 3094
3060 3095 Any python script can be run as a demo, but that does little more than showing
3061 3096 it on-screen, syntax-highlighted in one shot. If you add a little simple
3062 3097 markup, you can stop at specified intervals and return to the ipython prompt,
3063 3098 resuming execution later.
3064 3099 """
3065 3100
3066 3101 print 'Hello, welcome to an interactive IPython demo.'
3067 3102 print 'Executing this block should require confirmation before proceeding,'
3068 3103 print 'unless auto_all has been set to true in the demo object'
3069 3104
3070 3105 # The mark below defines a block boundary, which is a point where IPython will
3071 3106 # stop execution and return to the interactive prompt.
3072 3107 # Note that in actual interactive execution,
3073 3108 # <demo> --- stop ---
3074 3109
3075 3110 x = 1
3076 3111 y = 2
3077 3112
3078 3113 # <demo> --- stop ---
3079 3114
3080 3115 # the mark below marks this block as silent
3081 3116 # <demo> silent
3082 3117
3083 3118 print 'This is a silent block, which gets executed but not printed.'
3084 3119
3085 3120 # <demo> --- stop ---
3086 3121 # <demo> auto
3087 3122 print 'This is an automatic block.'
3088 3123 print 'It is executed without asking for confirmation, but printed.'
3089 3124 z = x+y
3090 3125
3091 3126 print 'z=',z
3092 3127
3093 3128 # <demo> --- stop ---
3094 3129 # This is just another normal block.
3095 3130 print 'z is now:', z
3096 3131
3097 3132 print 'bye!'
3098 3133
3099 3134 In order to run a file as a demo, you must first make a Demo object out
3100 3135 of it. If the file is named myscript.py, the following code will make a
3101 3136 demo::
3102 3137
3103 3138 from IPython.demo import Demo
3104 3139
3105 3140 mydemo = Demo('myscript.py')
3106 3141
3107 3142 This creates the mydemo object, whose blocks you run one at a time by
3108 3143 simply calling the object with no arguments. If you have autocall active
3109 3144 in IPython (the default), all you need to do is type::
3110 3145
3111 3146 mydemo
3112 3147
3113 3148 and IPython will call it, executing each block. Demo objects can be
3114 3149 restarted, you can move forward or back skipping blocks, re-execute the
3115 3150 last block, etc. Simply use the Tab key on a demo object to see its
3116 3151 methods, and call '?' on them to see their docstrings for more usage
3117 3152 details. In addition, the demo module itself contains a comprehensive
3118 3153 docstring, which you can access via::
3119 3154
3120 3155 from IPython import demo
3121 3156
3122 3157 demo?
3123 3158
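For instance, methods along these lines are available (the exact names are
best checked with tab completion or the docstrings, as suggested above)::

    In [1]: from IPython.demo import Demo

    In [2]: mydemo = Demo('myscript.py')

    In [3]: mydemo()            # run the next block

    In [4]: mydemo.back()       # move back one block
    In [5]: mydemo.reset()      # rewind the demo to the beginning
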
3124 3159 Limitations: It is important to note that these demos are limited to
3125 3160 fairly simple uses. In particular, you cannot put division marks in
3126 3161 indented code (loops, if statements, function definitions, etc.).
3127 3162 Supporting something like this would basically require tracking the
3128 3163 internal execution state of the Python interpreter, so only top-level
3129 3164 divisions are allowed. If you want to be able to open an IPython
3130 3165 instance at an arbitrary point in a program, you can use IPython's
3131 3166 embedding facilities, described in detail in the Embedding_ section.
3132 3167
3133 3168
3134 3169 .. _Matplotlib support:
3135 3170
3136 3171 Plotting with matplotlib
3137 3172 ========================
3138 3173
3139 3174 The matplotlib library (http://matplotlib.sourceforge.net) provides
3140 3175 high quality 2D plotting for
3141 3176 Python. Matplotlib can produce plots on screen using a variety of GUI
3142 3177 toolkits, including Tk, GTK and WXPython. It also provides a number of
3143 3178 commands useful for scientific computing, all with a syntax compatible
3144 3179 with that of the popular Matlab program.
3145 3180
3146 IPython accepts the special option -pylab (Sec. `Command line
3147 options`_). This configures it to support matplotlib, honoring the
3148 settings in the .matplotlibrc file. IPython will detect the user's
3149 choice of matplotlib GUI backend, and automatically select the proper
3150 threading model to prevent blocking. It also sets matplotlib in
3151 interactive mode and modifies %run slightly, so that any
3152 matplotlib-based script can be executed using %run and the final
3153 show() command does not block the interactive shell.
3154
3155 The -pylab option must be given first in order for IPython to
3156 configure its threading mode. However, you can still issue other
3157 options afterwards. This allows you to have a matplotlib-based
3158 environment customized with additional modules using the standard
3159 IPython profile mechanism (Sec. Profiles_): ''ipython -pylab -p
3160 myprofile'' will load the profile defined in ipythonrc-myprofile after
3161 configuring matplotlib.
3162
3163
3181 IPython accepts the special option -pylab (see :ref:`here
3182 <command_line_options>`). This configures it to support matplotlib, honoring
3183 the settings in the .matplotlibrc file. IPython will detect the user's choice
3184 of matplotlib GUI backend, and automatically select the proper threading model
3185 to prevent blocking. It also sets matplotlib in interactive mode and modifies
3186 %run slightly, so that any matplotlib-based script can be executed using %run
3187 and the final show() command does not block the interactive shell.
3188
3189 The -pylab option must be given first in order for IPython to configure its
3190 threading mode. However, you can still issue other options afterwards. This
3191 allows you to have a matplotlib-based environment customized with additional
3192 modules using the standard IPython profile mechanism (see :ref:`here
3193 <profiles>`): ``ipython -pylab -p myprofile`` will load the profile defined in
3194 ipythonrc-myprofile after configuring matplotlib.
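
For instance, after starting ``ipython -pylab``, an interactive plot is only a
couple of keystrokes away (an illustrative session; randn and hist come from
the pylab namespace that -pylab pre-loads)::

    In [1]: x = randn(10000)

    In [2]: hist(x, 100)     # a histogram window opens without blocking the shell
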
@@ -1,315 +1,315
1 1 .. _tutorial:
2 2
3 3 ======================
4 4 Quick IPython tutorial
5 5 ======================
6 6
7 .. contents::
8
9 7 IPython can be used as an improved replacement for the Python prompt,
10 8 and for that you don't really need to read any more of this manual. But
11 9 in this section we'll try to summarize a few tips on how to make the
12 10 most effective use of it for everyday Python development, highlighting
13 11 things you might miss in the rest of the manual (which is getting long).
14 12 We'll give references to parts in the manual which provide more detail
15 13 when appropriate.
16 14
17 15 The following article by Jeremy Jones provides an introductory tutorial
18 16 about IPython: http://www.onlamp.com/pub/a/python/2005/01/27/ipython.html
19 17
20 18 Highlights
21 19 ==========
22 20
23 21 Tab completion
24 22 --------------
25 23
26 24 TAB-completion, especially for attributes, is a convenient way to explore the
27 structure of any object you're dealing with. Simply type object_name.<TAB>
28 and a list of the object's attributes will be printed (see readline_ for
29 more). Tab completion also works on file and directory names, which combined
30 with IPython's alias system allows you to do from within IPython many of the
31 things you normally would need the system shell for.
25 structure of any object you're dealing with. Simply type object_name.<TAB> and
26 a list of the object's attributes will be printed (see :ref:`the readline
27 section <readline>` for more). Tab completion also works on file and directory
28 names, which combined with IPython's alias system allows you to do from within
29 IPython many of the things you normally would need the system shell for.
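
For example (illustrative; the completion list is abbreviated)::

    In [1]: import os

    In [2]: os.pa<TAB>       # completes to os.path, or lists os.path, os.pardir, ...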
32 30
33 31 Explore your objects
34 32 --------------------
35 33
36 34 Typing object_name? will print all sorts of details about any object,
37 35 including docstrings, function definition lines (for call arguments) and
38 36 constructor details for classes. The magic commands %pdoc, %pdef, %psource
39 37 and %pfile will respectively print the docstring, function definition line,
40 38 full source code and the complete file for any object (when they can be
41 39 found). If automagic is on (it is by default), you don't need to type the '%'
42 explicitly. See sec. `dynamic object information`_ for more.
40 explicitly. See :ref:`this section <dynamic_object_info>` for more.
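
A quick illustration (any object works)::

    In [1]: import os

    In [2]: os.getcwd?                    # docstring, type and call signature

    In [3]: %psource os.path.basename     # full source of the function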
43 41
44 42 The `%run` magic command
45 43 ------------------------
46 44
47 The %run magic command allows you to run any python script and load all of
48 its data directly into the interactive namespace. Since the file is re-read
49 from disk each time, changes you make to it are reflected immediately (in
50 contrast to the behavior of import). I rarely use import for code I am
51 testing, relying on %run instead. See magic_ section for more on this and
52 other magic commands, or type the name of any magic command and ? to get
53 details on it. See also sec. dreload_ for a recursive reload command. %run
45 The %run magic command allows you to run any python script and load all of its
46 data directly into the interactive namespace. Since the file is re-read from
47 disk each time, changes you make to it are reflected immediately (in contrast
48 to the behavior of import). I rarely use import for code I am testing, relying
49 on %run instead. See :ref:`this section <magic>` for more on this and other
50 magic commands, or type the name of any magic command and ? to get details on
51 it. See also :ref:`this section <dreload>` for a recursive reload command. %run
54 52 also has special flags for timing the execution of your scripts (-t) and for
55 53 executing them under the control of either Python's pdb debugger (-d) or
56 54 profiler (-p). With all of these, %run can be used as the main tool for
57 55 efficient interactive development of code which you write in your editor of
58 56 choice.
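
For instance (myscript.py is a placeholder)::

    In [1]: %run myscript.py        # run it; its variables land in the interactive namespace

    In [2]: %run -t myscript.py     # same, but report the execution time

    In [3]: %run -d myscript.py     # run it under the pdb debugger

    In [4]: %run -p myscript.py     # run it under the profiler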
59 57
60 58 Debug a Python script
61 59 ---------------------
62 60
63 Use the Python debugger, pdb. The %pdb command allows you to toggle on and
64 off the automatic invocation of an IPython-enhanced pdb debugger (with
65 coloring, tab completion and more) at any uncaught exception. The advantage
66 of this is that pdb starts inside the function where the exception occurred,
67 with all data still available. You can print variables, see code, execute
68 statements and even walk up and down the call stack to track down the true
69 source of the problem (which often is many layers in the stack above where
70 the exception gets triggered). Running programs with %run and pdb active can
71 be an efficient to develop and debug code, in many cases eliminating the need
72 for print statements or external debugging tools. I often simply put a 1/0 in
73 a place where I want to take a look so that pdb gets called, quickly view
74 whatever variables I need to or test various pieces of code and then remove
75 the 1/0. Note also that '%run -d' activates pdb and automatically sets
76 initial breakpoints for you to step through your code, watch variables, etc.
77 See Sec. `Output caching`_ for details.
61 Use the Python debugger, pdb. The %pdb command allows you to toggle on and off
62 the automatic invocation of an IPython-enhanced pdb debugger (with coloring,
63 tab completion and more) at any uncaught exception. The advantage of this is
64 that pdb starts inside the function where the exception occurred, with all data
65 still available. You can print variables, see code, execute statements and even
66 walk up and down the call stack to track down the true source of the problem
67 (which often is many layers in the stack above where the exception gets
68 triggered). Running programs with %run and pdb active can be an efficient way to
69 develop and debug code, in many cases eliminating the need for print statements
70 or external debugging tools. I often simply put a 1/0 in a place where I want
71 to take a look so that pdb gets called, quickly view whatever variables I need
72 to or test various pieces of code and then remove the 1/0. Note also that '%run
73 -d' activates pdb and automatically sets initial breakpoints for you to step
74 through your code, watch variables, etc. The :ref:`output caching section
75 <output_caching>` has more details.
78 76
79 77 Use the output cache
80 78 --------------------
81 79
82 80 All output results are automatically stored in a global dictionary named Out
83 81 and variables named _1, _2, etc. alias them. For example, the result of input
84 82 line 4 is available either as Out[4] or as _4. Additionally, three variables
85 83 named _, __ and ___ are always kept updated with the last three
86 84 results. This allows you to recall any previous result and further use it for
87 new calculations. See Sec. `Output caching`_ for more.
85 new calculations. See :ref:`the output caching section <output_caching>` for
86 more.
88 87
89 88 Suppress output
90 89 ---------------
91 90
92 91 Put a ';' at the end of a line to suppress the printing of output. This is
93 92 useful when doing calculations which generate long output you are not
94 93 interested in seeing. The _* variables and the Out[] list do get updated with
95 94 the contents of the output, even if it is not printed. You can thus still
96 95 access the generated results this way for further processing.
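
For example (an illustrative session)::

    In [1]: range(10);          # the trailing ';' suppresses the Out[1] line

    In [2]: _1                  # ...but, as described above, the result was still cached
    Out[2]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]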
97 96
98 97 Input cache
99 98 -----------
100 99
101 100 A similar system exists for caching input. All input is stored in a global
102 101 list called In , so you can re-execute lines 22 through 28 plus line 34 by
103 102 typing 'exec In[22:29]+In[34]' (using Python slicing notation). If you need
104 103 to execute the same set of lines often, you can assign them to a macro with
105 the %macro function. See sec. `Input caching`_ for more.
104 the %macro function. See :ref:`here <input_caching>` for more.
106 105
107 106 Use your input history
108 107 ----------------------
109 108
110 109 The %hist command can show you all previous input, without line numbers if
111 110 desired (option -n) so you can directly copy and paste code either back in
112 111 IPython or in a text editor. You can also save all your history by turning on
113 112 logging via %logstart; these logs can later be either reloaded as IPython
114 113 sessions or used as code for your programs.
115 114
116 115 Define your own system aliases
117 116 ------------------------------
118 117
119 118 Even though IPython gives you access to your system shell via the ! prefix,
120 119 it is convenient to have aliases to the system commands you use most often.
121 120 This allows you to work seamlessly from inside IPython with the same commands
122 121 you are used to in your system shell. IPython comes with some pre-defined
123 122 aliases and a complete system for changing directories, both via a stack (see
124 123 %pushd, %popd and %dhist) and via direct %cd. The latter keeps a history of
125 124 visited directories and allows you to go to any previously visited one.
126 125
127 126 Call system shell commands
128 127 --------------------------
129 128
130 129 Use Python to manipulate the results of system commands. The '!!' special
131 130 syntax, and the %sc and %sx magic commands allow you to capture system output
132 131 into Python variables.
133 132
134 133 Use Python variables when calling the shell
135 134 -------------------------------------------
136 135
137 Expand python variables when calling the shell (either via '!' and '!!' or
138 via aliases) by prepending a $ in front of them. You can also expand complete
139 python expressions. See `System shell access`_ for more.
136 Expand python variables when calling the shell (either via '!' and '!!' or via
137 aliases) by prepending a $ in front of them. You can also expand complete
138 python expressions. See :ref:`our shell section <system_shell_access>` for
139 more details.
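
For example::

    In [1]: pattern = '*.py'

    In [2]: !ls $pattern        # the shell receives: ls *.py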
140 140
141 141 Use profiles
142 142 ------------
143 143
144 144 Use profiles to maintain different configurations (modules to load, function
145 145 definitions, option settings) for particular tasks. You can then have
146 customized versions of IPython for specific purposes. See sec. profiles_ for
147 more.
146 customized versions of IPython for specific purposes. :ref:`This section
147 <profiles>` has more details.
148 148
149 149
150 150 Embed IPython in your programs
151 151 ------------------------------
152 152
153 153 A few lines of code are enough to load a complete IPython inside your own
154 154 programs, giving you the ability to work with your data interactively after
155 automatic processing has been completed. See sec. embedding_ for more.
155 automatic processing has been completed. See :ref:`here <embedding>` for more.
156 156
157 157 Use the Python profiler
158 158 -----------------------
159 159
160 160 When dealing with performance issues, the %run command with a -p option
161 161 allows you to run complete programs under the control of the Python profiler.
162 162 The %prun command does a similar job for single Python expressions (like
163 163 function calls).
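
For example (the script and function names are placeholders)::

    In [1]: %run -p myscript.py          # profile the whole script

    In [2]: %prun some_function(data)    # profile a single expression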
164 164
165 165 Use IPython to present interactive demos
166 166 ----------------------------------------
167 167
168 168 Use the IPython.demo.Demo class to load any Python script as an interactive
169 demo. With a minimal amount of simple markup, you can control the execution
170 of the script, stopping as needed. See sec. `interactive demos`_ for more.
169 demo. With a minimal amount of simple markup, you can control the execution of
170 the script, stopping as needed. See :ref:`here <interactive_demos>` for more.
171 171
172 172 Run doctests
173 173 ------------
174 174
175 175 Run your doctests from within IPython for development and debugging. The
176 176 special %doctest_mode command toggles a mode where the prompt, output and
177 177 exceptions display matches as closely as possible that of the default Python
178 178 interpreter. In addition, this mode allows you to directly paste in code that
179 179 contains leading '>>>' prompts, even if they have extra leading whitespace
180 180 (as is common in doctest files). This combined with the '%history -tn' call
181 181 to see your translated history (with these extra prompts removed and no line
182 182 numbers) allows for an easy doctest workflow, where you can go from doctest
183 183 to interactive execution to pasting into valid Python code as needed.
184 184
185 185 Source code handling tips
186 186 =========================
187 187
188 188 IPython is a line-oriented program, without full control of the
189 189 terminal. Therefore, it doesn't support true multiline editing. However,
190 190 it has a number of useful tools to help you in dealing effectively with
191 191 more complex editing.
192 192
193 193 The %edit command gives a reasonable approximation of multiline editing,
194 194 by invoking your favorite editor on the spot. IPython will execute the
195 195 code you type in there as if it were typed interactively. Type %edit?
196 196 for the full details on the edit command.
197 197
198 198 If you have typed various commands during a session, which you'd like to
199 199 reuse, IPython provides you with a number of tools. Start by using %hist
200 200 to see your input history, so you can see the line numbers of all input.
201 201 Let us say that you'd like to reuse lines 10 through 20, plus lines 24
202 202 and 28. All the commands below can operate on these with the syntax::
203 203
204 204 %command 10-20 24 28
205 205
206 206 where the command given can be:
207 207
208 208 * %macro <macroname>: this stores the lines into a variable which,
209 209 when called at the prompt, re-executes the input. Macros can be
210 210 edited later using '%edit macroname', and they can be stored
211 211 persistently across sessions with '%store macroname' (the storage
212 212 system is per-profile). The combination of quick macros,
213 213 persistent storage and editing, allows you to easily refine
214 214 quick-and-dirty interactive input into permanent utilities, always
215 215 available both in IPython and as files for general reuse.
216 216 * %edit: this will open a text editor with those lines pre-loaded
217 217 for further modification. It will then execute the resulting
218 218 file's contents as if you had typed it at the prompt.
219 219 * %save <filename>: this saves the lines directly to a named file on
220 220 disk.
221 221
222 222 While %macro saves input lines into memory for interactive re-execution,
223 223 sometimes you'd like to save your input directly to a file. The %save
224 224 magic does this: its input syntax is the same as %macro, but it saves
225 225 your input directly to a Python file. Note that the %logstart command
226 226 also saves input, but it logs all input to disk (though you can
227 227 temporarily suspend it and reactivate it with %logoff/%logon); %save
228 228 allows you to select which lines of input you need to save.
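
Continuing the example above, the three commands would be used like this (the
macro and file names are illustrative)::

    In [29]: %macro mymacro 10-20 24 28        # re-executable macro of those lines

    In [30]: %edit 10-20 24 28                 # open the same lines in your editor

    In [31]: %save mysession.py 10-20 24 28    # write them to mysession.py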
229 229
230 230
231 231 Lightweight 'version control'
232 232 =============================
233 233
234 234 When you call %edit with no arguments, IPython opens an empty editor
235 235 with a temporary file, and it returns the contents of your editing
236 236 session as a string variable. Thanks to IPython's output caching
237 237 mechanism, this is automatically stored::
238 238
239 239 In [1]: %edit
240 240
241 241 IPython will make a temporary file named: /tmp/ipython_edit_yR-HCN.py
242 242
243 243 Editing... done. Executing edited code...
244 244
245 245 hello - this is a temporary file
246 246
247 247 Out[1]: "print 'hello - this is a temporary file'\n"
248 248
249 249 Now, if you call '%edit -p', IPython tries to open an editor with the
250 250 same data as the last time you used %edit. So if you haven't used %edit
251 251 in the meantime, this same contents will reopen; however, it will be
252 252 done in a new file. This means that if you make changes and you later
253 253 want to find an old version, you can always retrieve it by using its
254 254 output number, via '%edit _NN', where NN is the number of the output
255 255 prompt.
256 256
257 257 Continuing with the example above, this should illustrate this idea::
258 258
259 259 In [2]: edit -p
260 260
261 261 IPython will make a temporary file named: /tmp/ipython_edit_nA09Qk.py
262 262
263 263 Editing... done. Executing edited code...
264 264
265 265 hello - now I made some changes
266 266
267 267 Out[2]: "print 'hello - now I made some changes'\n"
268 268
269 269 In [3]: edit _1
270 270
271 271 IPython will make a temporary file named: /tmp/ipython_edit_gy6-zD.py
272 272
273 273 Editing... done. Executing edited code...
274 274
275 275 hello - this is a temporary file
276 276
277 277 IPython version control at work :)
278 278
279 279 Out[3]: "print 'hello - this is a temporary file'\nprint 'IPython version control at work :)'\n"
280 280
281 281
282 282 This section was written after a contribution by Alexander Belchenko on
283 283 the IPython user list.
284 284
285 285
286 286 Effective logging
287 287 =================
288 288
289 289 A very useful suggestion sent in by Robert Kern follows:
290 290
291 291 I recently happened on a nifty way to keep tidy per-project log files. I
292 292 made a profile for my project (which is called "parkfield")::
293 293
294 294 include ipythonrc
295 295
296 296 # cancel earlier logfile invocation:
297 297
298 298 logfile ''
299 299
300 300 execute import time
301 301
302 302 execute __cmd = '/Users/kern/research/logfiles/parkfield-%s.log rotate'
303 303
304 304 execute __IP.magic_logstart(__cmd % time.strftime('%Y-%m-%d'))
305 305
306 306 I also added a shell alias for convenience::
307 307
308 308 alias parkfield="ipython -pylab -profile parkfield"
309 309
310 310 Now I have a nice little directory with everything I ever type in,
311 311 organized by project and date.
312 312
313 313 Contribute your own: If you have your own favorite tip on using IPython
314 314 efficiently for a certain task (especially things which can't be done in
315 315 the normal Python interpreter), don't hesitate to send it!
@@ -1,61 +1,91
1 1 .. _license:
2 2
3 =============================
4 License and Copyright
5 =============================
3 =====================
4 License and Copyright
5 =====================
6 6
7 This files needs to be updated to reflect what the new COPYING.txt files says about our license and copyright!
7 License
8 =======
8 9
9 IPython is released under the terms of the BSD license, whose general
10 form can be found at: http://www.opensource.org/licenses/bsd-license.php. The full text of the
11 IPython license is reproduced below::
10 IPython is licensed under the terms of the new or revised BSD license, as follows::
12 11
13 IPython is released under a BSD-type license.
14
15 Copyright (c) 2001, 2002, 2003, 2004 Fernando Perez
16 <fperez@colorado.edu>.
17
18 Copyright (c) 2001 Janko Hauser <jhauser@zscout.de> and
19 Nathaniel Gray <n8gray@caltech.edu>.
12 Copyright (c) 2008, IPython Development Team
20 13
21 14 All rights reserved.
22 15
23 16 Redistribution and use in source and binary forms, with or without
24 modification, are permitted provided that the following conditions
25 are met:
26
27 a. Redistributions of source code must retain the above copyright
28 notice, this list of conditions and the following disclaimer.
29
30 b. Redistributions in binary form must reproduce the above copyright
31 notice, this list of conditions and the following disclaimer in the
32 documentation and/or other materials provided with the distribution.
33
34 c. Neither the name of the copyright holders nor the names of any
35 contributors to this software may be used to endorse or promote
36 products derived from this software without specific prior written
37 permission.
38
39 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
40 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
41 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
42 FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
43 REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
44 INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
45 BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
46 LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
47 CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
48 LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
49 ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
50 POSSIBILITY OF SUCH DAMAGE.
51
52 Individual authors are the holders of the copyright for their code and
53 are listed in each file.
17 modification, are permitted provided that the following conditions are
18 met:
19
20 Redistributions of source code must retain the above copyright notice,
21 this list of conditions and the following disclaimer.
22
23 Redistributions in binary form must reproduce the above copyright notice,
24 this list of conditions and the following disclaimer in the documentation
25 and/or other materials provided with the distribution.
26
27 Neither the name of the IPython Development Team nor the names of its
28 contributors may be used to endorse or promote products derived from this
29 software without specific prior written permission.
30
31 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
32 IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
33 THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
34 PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
35 CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
36 EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
37 PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
38 PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
39 LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
40 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
41 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42
43 About the IPython Development Team
44 ==================================
45
46 Fernando Perez began IPython in 2001 based on code from Janko Hauser
47 <jhauser@zscout.de> and Nathaniel Gray <n8gray@caltech.edu>. Fernando is still
48 the project lead.
49
50 The IPython Development Team is the set of all contributors to the IPython
51 project. This includes all of the IPython subprojects. Here is a list of the
52 currently active contributors:
53
54 * Matthieu Brucher
55 * Ondrej Certik
56 * Laurent Dufrechou
57 * Robert Kern
58 * Brian E. Granger
59 * Fernando Perez (project leader)
60 * Benjamin Ragan-Kelley
61 * Ville M. Vainio
62 * Gael Varoquaux
63 * Stefan van der Walt
64 * Tech-X Corporation
65 * Barry Wark
66
67 If your name is missing, please add it.
68
69 Our Copyright Policy
70 ====================
71
72 IPython uses a shared copyright model. Each contributor maintains copyright
73 over their contributions to IPython. But, it is important to note that these
74 contributions are typically only changes to the repositories. Thus, the
75 IPython source code, in its entirety, is not the copyright of any single person
76 or institution. Instead, it is the collective copyright of the entire IPython
77 Development Team. If individual contributors want to maintain a record of what
78 changes/contributions they have specific copyright on, they should indicate
79 their copyright in the commit message of the change, when they commit the
80 change to one of the IPython repositories.
81
82 Miscellaneous
83 =============
54 84
55 85 Some files (DPyGetOpt.py, for example) may be licensed under different
56 conditions. Ultimately each file indicates clearly the conditions under
57 which its author/authors have decided to publish the code.
86 conditions. Ultimately each file indicates clearly the conditions under which
87 its author/authors have decided to publish the code.
58 88
59 Versions of IPython up to and including 0.6.3 were released under the
60 GNU Lesser General Public License (LGPL), available at
89 Versions of IPython up to and including 0.6.3 were released under the GNU
90 Lesser General Public License (LGPL), available at
61 91 http://www.gnu.org/copyleft/lesser.html. No newline at end of file
@@ -1,174 +1,233
1 1 .. _overview:
2 2
3 3 ============
4 4 Introduction
5 5 ============
6 6
7 7 Overview
8 8 ========
9 9
10 10 One of Python's most useful features is its interactive interpreter.
11 11 This system allows very fast testing of ideas without the overhead of
12 12 creating test files as is typical in most programming languages.
13 13 However, the interpreter supplied with the standard Python distribution
14 14 is somewhat limited for extended interactive use.
15 15
16 16 The goal of IPython is to create a comprehensive environment for
17 interactive and exploratory computing. To support, this goal, IPython
17 interactive and exploratory computing. To support this goal, IPython
18 18 has two main components:
19 19
20 * An enhanced interactive Python shell.
21 * An architecture for interactive parallel computing.
20 * An enhanced interactive Python shell.
21 * An architecture for interactive parallel computing.
22 22
23 23 All of IPython is open source (released under the revised BSD license).
24 24
25 25 Enhanced interactive Python shell
26 26 =================================
27 27
28 IPython's interactive shell (`ipython`), has the following goals:
29
30 1. Provide an interactive shell superior to Python's default. IPython
31 has many features for object introspection, system shell access,
32 and its own special command system for adding functionality when
33 working interactively. It tries to be a very efficient environment
34 both for Python code development and for exploration of problems
35 using Python objects (in situations like data analysis).
36 2. Serve as an embeddable, ready to use interpreter for your own
37 programs. IPython can be started with a single call from inside
38 another program, providing access to the current namespace. This
39 can be very useful both for debugging purposes and for situations
40 where a blend of batch-processing and interactive exploration are
41 needed.
42 3. Offer a flexible framework which can be used as the base
43 environment for other systems with Python as the underlying
44 language. Specifically scientific environments like Mathematica,
45 IDL and Matlab inspired its design, but similar ideas can be
46 useful in many fields.
47 4. Allow interactive testing of threaded graphical toolkits. IPython
48 has support for interactive, non-blocking control of GTK, Qt and
49 WX applications via special threading flags. The normal Python
50 shell can only do this for Tkinter applications.
28 IPython's interactive shell (:command:`ipython`) has the following goals,
29 amongst others:
30
31 1. Provide an interactive shell superior to Python's default. IPython
32 has many features for object introspection, system shell access,
33 and its own special command system for adding functionality when
34 working interactively. It tries to be a very efficient environment
35 both for Python code development and for exploration of problems
36 using Python objects (in situations like data analysis).
37
38 2. Serve as an embeddable, ready to use interpreter for your own
39 programs. IPython can be started with a single call from inside
40 another program, providing access to the current namespace. This
41 can be very useful both for debugging purposes and for situations
42 where a blend of batch-processing and interactive exploration are
43 needed. New in the 0.9 version of IPython is a reusable wxPython-based
44 IPython widget.
45
46 3. Offer a flexible framework which can be used as the base
47 environment for other systems with Python as the underlying
48 language. Specifically scientific environments like Mathematica,
49 IDL and Matlab inspired its design, but similar ideas can be
50 useful in many fields.
51
52 4. Allow interactive testing of threaded graphical toolkits. IPython
53 has support for interactive, non-blocking control of GTK, Qt and
54 WX applications via special threading flags. The normal Python
55 shell can only do this for Tkinter applications.
51 56
52 57 Main features of the interactive shell
53 58 --------------------------------------
54 59
55 * Dynamic object introspection. One can access docstrings, function
56 definition prototypes, source code, source files and other details
57 of any object accessible to the interpreter with a single
58 keystroke (:samp:`?`, and using :samp:`??` provides additional detail).
59 * Searching through modules and namespaces with :samp:`*` wildcards, both
60 when using the :samp:`?` system and via the :samp:`%psearch` command.
61 * Completion in the local namespace, by typing :kbd:`TAB` at the prompt.
62 This works for keywords, modules, methods, variables and files in the
63 current directory. This is supported via the readline library, and
64 full access to configuring readline's behavior is provided.
65 Custom completers can be implemented easily for different purposes
66 (system commands, magic arguments etc.)
67 * Numbered input/output prompts with command history (persistent
68 across sessions and tied to each profile), full searching in this
69 history and caching of all input and output.
70 * User-extensible 'magic' commands. A set of commands prefixed with
71 :samp:`%` is available for controlling IPython itself and provides
72 directory control, namespace information and many aliases to
73 common system shell commands.
74 * Alias facility for defining your own system aliases.
75 * Complete system shell access. Lines starting with :samp:`!` are passed
76 directly to the system shell, and using :samp:`!!` or :samp:`var = !cmd`
77 captures shell output into python variables for further use.
78 * Background execution of Python commands in a separate thread.
79 IPython has an internal job manager called jobs, and a
80 conveninence backgrounding magic function called :samp:`%bg`.
81 * The ability to expand python variables when calling the system
82 shell. In a shell command, any python variable prefixed with :samp:`$` is
83 expanded. A double :samp:`$$` allows passing a literal :samp:`$` to the shell (for
84 access to shell and environment variables like :envvar:`PATH`).
85 * Filesystem navigation, via a magic :samp:`%cd` command, along with a
86 persistent bookmark system (using :samp:`%bookmark`) for fast access to
87 frequently visited directories.
88 * A lightweight persistence framework via the :samp:`%store` command, which
89 allows you to save arbitrary Python variables. These get restored
90 automatically when your session restarts.
91 * Automatic indentation (optional) of code as you type (through the
92 readline library).
93 * Macro system for quickly re-executing multiple lines of previous
94 input with a single name. Macros can be stored persistently via
95 :samp:`%store` and edited via :samp:`%edit`.
96 * Session logging (you can then later use these logs as code in your
97 programs). Logs can optionally timestamp all input, and also store
98 session output (marked as comments, so the log remains valid
99 Python source code).
100 * Session restoring: logs can be replayed to restore a previous
101 session to the state where you left it.
102 * Verbose and colored exception traceback printouts. Easier to parse
103 visually, and in verbose mode they produce a lot of useful
104 debugging information (basically a terminal version of the cgitb
105 module).
106 * Auto-parentheses: callable objects can be executed without
107 parentheses: :samp:`sin 3` is automatically converted to :samp:`sin(3)`.
108 * Auto-quoting: using :samp:`,`, or :samp:`;` as the first character forces
109 auto-quoting of the rest of the line: :samp:`,my_function a b` becomes
110 automatically :samp:`my_function("a","b")`, while :samp:`;my_function a b`
111 becomes :samp:`my_function("a b")`.
112 * Extensible input syntax. You can define filters that pre-process
113 user input to simplify input in special situations. This allows
114 for example pasting multi-line code fragments which start with
115 :samp:`>>>` or :samp:`...` such as those from other python sessions or the
116 standard Python documentation.
117 * Flexible configuration system. It uses a configuration file which
118 allows permanent setting of all command-line options, module
119 loading, code and file execution. The system allows recursive file
120 inclusion, so you can have a base file with defaults and layers
121 which load other customizations for particular projects.
122 * Embeddable. You can call IPython as a python shell inside your own
123 python programs. This can be used both for debugging code or for
124 providing interactive abilities to your programs with knowledge
125 about the local namespaces (very useful in debugging and data
126 analysis situations).
127 * Easy debugger access. You can set IPython to call up an enhanced
128 version of the Python debugger (pdb) every time there is an
129 uncaught exception. This drops you inside the code which triggered
130 the exception with all the data live and it is possible to
131 navigate the stack to rapidly isolate the source of a bug. The
132 :samp:`%run` magic command (with the :samp:`-d` option) can run any script under
133 pdb's control, automatically setting initial breakpoints for you.
134 This version of pdb has IPython-specific improvements, including
135 tab-completion and traceback coloring support. For even easier
136 debugger access, try :samp:`%debug` after seeing an exception. winpdb is
137 also supported, see ipy_winpdb extension.
138 * Profiler support. You can run single statements (similar to
139 :samp:`profile.run()`) or complete programs under the profiler's control.
140 While this is possible with standard cProfile or profile modules,
141 IPython wraps this functionality with magic commands (see :samp:`%prun`
142 and :samp:`%run -p`) convenient for rapid interactive work.
143 * Doctest support. The special :samp:`%doctest_mode` command toggles a mode
144 that allows you to paste existing doctests (with leading :samp:`>>>`
145 prompts and whitespace) and uses doctest-compatible prompts and
146 output, so you can use IPython sessions as doctest code.
60 * Dynamic object introspection. One can access docstrings, function
61 definition prototypes, source code, source files and other details
62 of any object accessible to the interpreter with a single
63 keystroke (:samp:`?`, and using :samp:`??` provides additional detail).
64
65 * Searching through modules and namespaces with :samp:`*` wildcards, both
66 when using the :samp:`?` system and via the :samp:`%psearch` command.
67
68 * Completion in the local namespace, by typing :kbd:`TAB` at the prompt.
69 This works for keywords, modules, methods, variables and files in the
70 current directory. This is supported via the readline library, and
71 full access to configuring readline's behavior is provided.
72 Custom completers can be implemented easily for different purposes
73 (system commands, magic arguments etc.)
74
75 * Numbered input/output prompts with command history (persistent
76 across sessions and tied to each profile), full searching in this
77 history and caching of all input and output.
78
79 * User-extensible 'magic' commands. A set of commands prefixed with
80 :samp:`%` is available for controlling IPython itself and provides
81 directory control, namespace information and many aliases to
82 common system shell commands.
83
84 * Alias facility for defining your own system aliases.
85
86 * Complete system shell access. Lines starting with :samp:`!` are passed
87 directly to the system shell, and using :samp:`!!` or :samp:`var = !cmd`
88 captures shell output into python variables for further use.
89
90 * Background execution of Python commands in a separate thread.
91 IPython has an internal job manager called jobs, and a
92 convenience backgrounding magic function called :samp:`%bg`.
93
94 * The ability to expand python variables when calling the system
95 shell. In a shell command, any python variable prefixed with :samp:`$` is
96 expanded. A double :samp:`$$` allows passing a literal :samp:`$` to the shell (for
97 access to shell and environment variables like :envvar:`PATH`).
98
99 * Filesystem navigation, via a magic :samp:`%cd` command, along with a
100 persistent bookmark system (using :samp:`%bookmark`) for fast access to
101 frequently visited directories.
102
103 * A lightweight persistence framework via the :samp:`%store` command, which
104 allows you to save arbitrary Python variables. These get restored
105 automatically when your session restarts.
106
107 * Automatic indentation (optional) of code as you type (through the
108 readline library).
109
110 * Macro system for quickly re-executing multiple lines of previous
111 input with a single name. Macros can be stored persistently via
112 :samp:`%store` and edited via :samp:`%edit`.
113
114 * Session logging (you can then later use these logs as code in your
115 programs). Logs can optionally timestamp all input, and also store
116 session output (marked as comments, so the log remains valid
117 Python source code).
118
119 * Session restoring: logs can be replayed to restore a previous
120 session to the state where you left it.
121
122 * Verbose and colored exception traceback printouts. Easier to parse
123 visually, and in verbose mode they produce a lot of useful
124 debugging information (basically a terminal version of the cgitb
125 module).
126
127 * Auto-parentheses: callable objects can be executed without
128 parentheses: :samp:`sin 3` is automatically converted to :samp:`sin(3)`.
129
130 * Auto-quoting: using :samp:`,`, or :samp:`;` as the first character forces
131 auto-quoting of the rest of the line: :samp:`,my_function a b` becomes
132 automatically :samp:`my_function("a","b")`, while :samp:`;my_function a b`
133 becomes :samp:`my_function("a b")`.
134
135 * Extensible input syntax. You can define filters that pre-process
136 user input to simplify input in special situations. This allows
137 for example pasting multi-line code fragments which start with
138 :samp:`>>>` or :samp:`...` such as those from other python sessions or the
139 standard Python documentation.
140
141 * Flexible configuration system. It uses a configuration file which
142 allows permanent setting of all command-line options, module
143 loading, code and file execution. The system allows recursive file
144 inclusion, so you can have a base file with defaults and layers
145 which load other customizations for particular projects.
146
147 * Embeddable. You can call IPython as a python shell inside your own
148 python programs. This can be used both for debugging code and for
149 providing interactive abilities to your programs with knowledge
150 about the local namespaces (very useful in debugging and data
151 analysis situations).
152
153 * Easy debugger access. You can set IPython to call up an enhanced
154 version of the Python debugger (pdb) every time there is an
155 uncaught exception. This drops you inside the code which triggered
156 the exception with all the data live and it is possible to
157 navigate the stack to rapidly isolate the source of a bug. The
158 :samp:`%run` magic command (with the :samp:`-d` option) can run any script under
159 pdb's control, automatically setting initial breakpoints for you.
160 This version of pdb has IPython-specific improvements, including
161 tab-completion and traceback coloring support. For even easier
162 debugger access, try :samp:`%debug` after seeing an exception. winpdb is
163 also supported; see the ipy_winpdb extension.
164
165 * Profiler support. You can run single statements (similar to
166 :samp:`profile.run()`) or complete programs under the profiler's control.
167 While this is possible with standard cProfile or profile modules,
168 IPython wraps this functionality with magic commands (see :samp:`%prun`
169 and :samp:`%run -p`) convenient for rapid interactive work.
170
171 * Doctest support. The special :samp:`%doctest_mode` command toggles a mode
172 that allows you to paste existing doctests (with leading :samp:`>>>`
173 prompts and whitespace) and uses doctest-compatible prompts and
174 output, so you can use IPython sessions as doctest code.
147 175
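As a brief illustration of the shell-access and variable-expansion items in the list above, here is a minimal session sketch (the directory contents and the file pattern are only placeholders):

.. sourcecode:: ipython

    # Capture the output of a shell command into a Python variable
    In [1]: files = !ls

    # Any Python variable prefixed with $ is expanded inside a shell command
    In [2]: pattern = '*.txt'

    In [3]: !ls $pattern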
148 176 Interactive parallel computing
149 177 ==============================
150 178
151 179 Increasingly, parallel computer hardware, such as multicore CPUs, clusters and supercomputers, is becoming ubiquitous. Over the last 3 years, we have developed an
152 180 architecture within IPython that allows such hardware to be used quickly and easily
153 181 from Python. Moreover, this architecture is designed to support interactive and
154 182 collaborative parallel computing.
155 183
184 The main features of this system are:
185
186 * Quickly parallelize Python code from an interactive Python/IPython session.
187
188 * A flexible and dynamic process model that can be deployed on anything from
189 multicore workstations to supercomputers.
190
191 * An architecture that supports many different styles of parallelism, from
192 message passing to task farming, all of which can be handled
193 interactively.
194
195 * Both blocking and fully asynchronous interfaces.
196
197 * High level APIs that enable many things to be parallelized in a few lines
198 of code.
199
200 * Write parallel code that will run unchanged on everything from multicore
201 workstations to supercomputers.
202
203 * Full integration with Message Passing libraries (MPI).
204
205 * Capabilities-based security model with full encryption of network connections.
206
207 * Share live parallel jobs with other users securely. We call this collaborative
208 parallel computing.
209
210 * Dynamically load-balanced task farming system.
211
212 * Robust error handling. Python exceptions raised in parallel execution are
213 gathered and presented to the top-level code.
214
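As a small taste of the high-level APIs mentioned above, the multiengine interface (described later in these docs) lets you parallelize a simple computation in a couple of lines. This is only a sketch and assumes a controller and engines have already been started with :command:`ipcluster`:

.. sourcecode:: ipython

    In [1]: from IPython.kernel import client

    # Connects using the FURL file in the default ~/.ipython/security location
    In [2]: mec = client.MultiEngineClient()

    # Apply a function to a sequence, with the work spread across the engines
    In [3]: result = mec.map(lambda x: x**10, range(32))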
156 215 For more information, see our :ref:`overview <parallel_index>` of using IPython for
157 216 parallel computing.
158 217
159 218 Portability and Python requirements
160 219 -----------------------------------
161 220
162 221 As of the 0.9 release, IPython requires Python 2.4 or greater. We have
163 222 not begun to test IPython on Python 2.6 or 3.0, but we expect it will
164 223 work with some minor changes.
165 224
166 225 IPython is known to work on the following operating systems:
167 226
168 227 * Linux
169 228 * AIX
170 229 * Most other Unix-like OSs (Solaris, BSD, etc.)
171 230 * Mac OS X
172 231 * Windows (CygWin, XP, Vista, etc.)
173 232
174 233 See :ref:`here <install_index>` for instructions on how to install IPython. No newline at end of file
@@ -1,17 +1,16
1 1 .. _parallel_index:
2 2
3 3 ====================================
4 Using IPython for Parallel computing
4 Using IPython for parallel computing
5 5 ====================================
6 6
7 User Documentation
8 ==================
9
10 7 .. toctree::
11 8 :maxdepth: 2
12 9
13 10 parallel_intro.txt
11 parallel_process.txt
14 12 parallel_multiengine.txt
15 13 parallel_task.txt
16 14 parallel_mpi.txt
15 parallel_security.txt
17 16
@@ -1,242 +1,205
1 1 .. _ip1par:
2 2
3 ======================================
4 Using IPython for parallel computing
5 ======================================
6
7 .. contents::
3 ============================
4 Overview and getting started
5 ============================
8 6
9 7 Introduction
10 8 ============
11 9
12 This file gives an overview of IPython. IPython has a sophisticated and
13 powerful architecture for parallel and distributed computing. This
14 architecture abstracts out parallelism in a very general way, which
15 enables IPython to support many different styles of parallelism
16 including:
10 This section gives an overview of IPython's sophisticated and powerful
11 architecture for parallel and distributed computing. This architecture
12 abstracts out parallelism in a very general way, which enables IPython to
13 support many different styles of parallelism including:
17 14
18 * Single program, multiple data (SPMD) parallelism.
19 * Multiple program, multiple data (MPMD) parallelism.
20 * Message passing using ``MPI``.
21 * Task farming.
22 * Data parallel.
23 * Combinations of these approaches.
24 * Custom user defined approaches.
15 * Single program, multiple data (SPMD) parallelism.
16 * Multiple program, multiple data (MPMD) parallelism.
17 * Message passing using MPI.
18 * Task farming.
19 * Data parallel.
20 * Combinations of these approaches.
21 * Custom user defined approaches.
25 22
26 23 Most importantly, IPython enables all types of parallel applications to
27 24 be developed, executed, debugged and monitored *interactively*. Hence,
27 24 the ``I`` in IPython. The following are some example use cases for IPython:
29 26
30 * Quickly parallelize algorithms that are embarrassingly parallel
31 using a number of simple approaches. Many simple things can be
32 parallelized interactively in one or two lines of code.
33 * Steer traditional MPI applications on a supercomputer from an
34 IPython session on your laptop.
35 * Analyze and visualize large datasets (that could be remote and/or
36 distributed) interactively using IPython and tools like
37 matplotlib/TVTK.
38 * Develop, test and debug new parallel algorithms
39 (that may use MPI) interactively.
40 * Tie together multiple MPI jobs running on different systems into
41 one giant distributed and parallel system.
42 * Start a parallel job on your cluster and then have a remote
43 collaborator connect to it and pull back data into their
44 local IPython session for plotting and analysis.
45 * Run a set of tasks on a set of CPUs using dynamic load balancing.
27 * Quickly parallelize algorithms that are embarrassingly parallel
28 using a number of simple approaches. Many simple things can be
29 parallelized interactively in one or two lines of code.
30
31 * Steer traditional MPI applications on a supercomputer from an
32 IPython session on your laptop.
33
34 * Analyze and visualize large datasets (that could be remote and/or
35 distributed) interactively using IPython and tools like
36 matplotlib/TVTK.
37
38 * Develop, test and debug new parallel algorithms
39 (that may use MPI) interactively.
40
41 * Tie together multiple MPI jobs running on different systems into
42 one giant distributed and parallel system.
43
44 * Start a parallel job on your cluster and then have a remote
45 collaborator connect to it and pull back data into their
46 local IPython session for plotting and analysis.
47
48 * Run a set of tasks on a set of CPUs using dynamic load balancing.
46 49
47 50 Architecture overview
48 51 =====================
49 52
50 53 The IPython architecture consists of three components:
51 54
52 * The IPython engine.
53 * The IPython controller.
54 * Various controller Clients.
55 * The IPython engine.
56 * The IPython controller.
57 * Various controller clients.
58
59 These components live in the :mod:`IPython.kernel` package and are
60 installed with IPython. They do, however, have additional dependencies
61 that must be installed. For more information, see our
62 :ref:`installation documentation <install_index>`.
55 63
56 64 IPython engine
57 65 ---------------
58 66
59 67 The IPython engine is a Python instance that takes Python commands over a
60 68 network connection. Eventually, the IPython engine will be a full IPython
61 69 interpreter, but for now, it is a regular Python interpreter. The engine
62 70 can also handle incoming and outgoing Python objects sent over a network
63 71 connection. When multiple engines are started, parallel and distributed
64 72 computing becomes possible. An important feature of an IPython engine is
65 73 that it blocks while user code is being executed. Read on for how the
66 74 IPython controller solves this problem to expose a clean asynchronous API
67 75 to the user.
68 76
69 77 IPython controller
70 78 ------------------
71 79
72 80 The IPython controller provides an interface for working with a set of
73 81 engines. At an general level, the controller is a process to which
74 82 IPython engines can connect. For each connected engine, the controller
75 83 manages a queue. All actions that can be performed on the engine go
76 84 through this queue. While the engines themselves block when user code is
77 85 run, the controller hides that from the user to provide a fully
78 asynchronous interface to a set of engines. Because the controller
79 listens on a network port for engines to connect to it, it must be
80 started before any engines are started.
86 asynchronous interface to a set of engines.
87
88 .. note::
89
90 Because the controller listens on a network port for engines to
91 connect to it, it must be started *before* any engines are started.
81 92
82 93 The controller also provides a single point of contact for users who wish
83 94 to utilize the engines connected to the controller. There are different
84 95 ways of working with a controller. In IPython these ways correspond to different interfaces that the controller is adapted to. Currently we have two default interfaces to the controller:
85 96
86 * The MultiEngine interface.
87 * The Task interface.
97 * The MultiEngine interface, which provides the simplest possible way of
98 working with engines interactively.
99 * The Task interface, which presents the engines as a load-balanced
100 task farming system.
88 101
89 102 Advanced users can easily add new custom interfaces to enable other
90 103 styles of parallelism.
91 104
92 105 .. note::
93 106
94 107 A single controller and set of engines can be accessed
95 108 through multiple interfaces simultaneously. This opens the
96 109 door for lots of interesting things.
97 110
98 111 Controller clients
99 112 ------------------
100 113
101 114 For each controller interface, there is a corresponding client. These
102 115 clients allow users to interact with a set of engines through the
103 interface.
116 interface. Here are the two default clients:
117
118 * The :class:`MultiEngineClient` class.
119 * The :class:`TaskClient` class.
104 120
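For example, here is a minimal sketch of creating the two default clients, assuming that each one looks up its FURL file in the default :file:`~/.ipython/security` directory when created with no arguments:

.. sourcecode:: ipython

    In [1]: from IPython.kernel import client

    # Client for the MultiEngine interface
    In [2]: mec = client.MultiEngineClient()

    # Client for the Task interface
    In [3]: tc = client.TaskClient()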
105 121 Security
106 122 --------
107 123
108 By default (as long as `pyOpenSSL` is installed) all network connections between the controller and engines and the controller and clients are secure. What does this mean? First of all, all of the connections will be encrypted using SSL. Second, the connections are authenticated. We handle authentication in a `capabilities`__ based security model. In this model, a "capability (known in some systems as a key) is a communicable, unforgeable token of authority". Put simply, a capability is like a key to your house. If you have the key to your house, you can get in, if not you can't.
109
110 .. __: http://en.wikipedia.org/wiki/Capability-based_security
124 By default (as long as `pyOpenSSL` is installed) all network connections between the controller and engines, and between the controller and clients, are secure. What does this mean? First of all, all of the connections will be encrypted using SSL. Second, the connections are authenticated. We handle authentication in a capability-based security model [Capability]_. In this model, a "capability (known in some systems as a key) is a communicable, unforgeable token of authority". Put simply, a capability is like a key to your house. If you have the key to your house, you can get in. If not, you can't.
111 125
112 In our architecture, the controller is the only process that listens on network ports, and is thus responsible to creating these keys. In IPython, these keys are known as Foolscap URLs, or FURLs, because of the underlying network protocol we are using. As a user, you don't need to know anything about the details of these FURLs, other than that when the controller starts, it saves a set of FURLs to files named something.furl. The default location of these files is your ~./ipython directory.
126 In our architecture, the controller is the only process that listens on network ports, and is thus responsible for creating these keys. In IPython, these keys are known as Foolscap URLs, or FURLs, because of the underlying network protocol we are using. As a user, you don't need to know anything about the details of these FURLs, other than that when the controller starts, it saves a set of FURLs to files named :file:`something.furl`. The default location of these files is the :file:`~/.ipython/security` directory.
113 127
114 To connect and authenticate to the controller an engine or client simply needs to present an appropriate furl (that was originally created by the controller) to the controller. Thus, the .furl files need to be copied to a location where the clients and engines can find them. Typically, this is the ~./ipython directory on the host where the client/engine is running (which could be a different host than the controller). Once the .furl files are copied over, everything should work fine.
128 To connect and authenticate to the controller, an engine or client simply needs to present an appropriate FURL (that was originally created by the controller) to the controller. Thus, the FURL files need to be copied to a location where the clients and engines can find them. Typically, this is the :file:`~/.ipython/security` directory on the host where the client/engine is running (which could be a different host than the controller). Once the FURL files are copied over, everything should work fine.
115 129
116 Getting Started
117 ===============
118
119 To use IPython for parallel computing, you need to start one instance of
120 the controller and one or more instances of the engine. The controller
121 and each engine can run on different machines or on the same machine.
122 Because of this, there are many different possibilities for setting up
123 the IP addresses and ports used by the various processes.
124
125 Starting the controller and engine on your local machine
126 --------------------------------------------------------
127
128 This is the simplest configuration that can be used and is useful for
129 testing the system and on machines that have multiple cores and/or
130 multple CPUs. The easiest way of doing this is using the ``ipcluster``
131 command::
130 Currently, there are three FURL files that the controller creates:
132 131
133 $ ipcluster -n 4
134
135 This will start an IPython controller and then 4 engines that connect to
136 the controller. Lastly, the script will print out the Python commands
137 that you can use to connect to the controller. It is that easy.
138
139 Underneath the hood, the ``ipcluster`` script uses two other top-level
140 scripts that you can also use yourself. These scripts are
141 ``ipcontroller``, which starts the controller and ``ipengine`` which
142 starts one engine. To use these scripts to start things on your local
143 machine, do the following.
144
145 First start the controller::
132 ipcontroller-engine.furl
133 This FURL file is the key that gives an engine the ability to connect
134 to a controller.
146 135
147 $ ipcontroller &
148
149 Next, start however many instances of the engine you want using (repeatedly) the command::
150
151 $ ipengine &
136 ipcontroller-tc.furl
137 This FURL file is the key that a :class:`TaskClient` must use to
138 connect to the task interface of a controller.
139
140 ipcontroller-mec.furl
141 This FURL file is the key that a :class:`MultiEngineClient` must use
142 to connect to the multiengine interface of a controller.
152 143
153 .. warning::
154
155 The order of the above operations is very important. You *must*
156 start the controller before the engines, since the engines connect
157 to the controller as they get started.
144 More details of how these FURL files are used are given below.
158 145
159 On some platforms you may need to give these commands in the form
160 ``(ipcontroller &)`` and ``(ipengine &)`` for them to work properly. The
161 engines should start and automatically connect to the controller on the
162 default ports, which are chosen for this type of setup. You are now ready
163 to use the controller and engines from IPython.
146 A detailed description of the security model and its implementation in IPython
147 can be found :ref:`here <parallelsecurity>`.
164 148
165 Starting the controller and engines on different machines
166 ---------------------------------------------------------
167
168 This section needs to be updated to reflect the new Foolscap capabilities based
169 model.
170
171 Using ``ipcluster`` with ``ssh``
172 --------------------------------
173
174 The ``ipcluster`` command can also start a controller and engines using
175 ``ssh``. We need more documentation on this, but for now here is any
176 example startup script::
177
178 controller = dict(host='myhost',
179 engine_port=None, # default is 10105
180 control_port=None,
181 )
182
183 # keys are hostnames, values are the number of engine on that host
184 engines = dict(node1=2,
185 node2=2,
186 node3=2,
187 node3=2,
188 )
189
190 Starting engines using ``mpirun``
191 ---------------------------------
192
193 The IPython engines can be started using ``mpirun``/``mpiexec``, even if
194 the engines don't call MPI_Init() or use the MPI API in any way. This is
195 supported on modern MPI implementations like `Open MPI`_.. This provides
196 an really nice way of starting a bunch of engine. On a system with MPI
197 installed you can do::
198
199 mpirun -n 4 ipengine --controller-port=10000 --controller-ip=host0
200
201 .. _Open MPI: http://www.open-mpi.org/
202
203 More details on using MPI with IPython can be found :ref:`here <parallelmpi>`.
149 Getting Started
150 ===============
204 151
205 Log files
206 ---------
152 To use IPython for parallel computing, you need to start one instance of
153 the controller and one or more instances of the engine. Initially, it is best to simply start a controller and engines on a single host using the :command:`ipcluster` command. To start a controller and 4 engines on your localhost, just do::
207 154
208 All of the components of IPython have log files associated with them.
209 These log files can be extremely useful in debugging problems with
210 IPython and can be found in the directory ``~/.ipython/log``. Sending
211 the log files to us will often help us to debug any problems.
155 $ ipcluster local -n 4
212 156
213 Next Steps
214 ==========
157 More details about starting the IPython controller and engines can be found :ref:`here <parallel_process>`.
215 158
216 159 Once you have started the IPython controller and one or more engines, you
217 are ready to use the engines to do somnething useful. To make sure
218 everything is working correctly, try the following commands::
160 are ready to use the engines to do something useful. To make sure
161 everything is working correctly, try the following commands:
162
163 .. sourcecode:: ipython
219 164
220 165 In [1]: from IPython.kernel import client
221 166
222 In [2]: mec = client.MultiEngineClient() # This looks for .furl files in ~./ipython
167 In [2]: mec = client.MultiEngineClient()
223 168
224 169 In [4]: mec.get_ids()
225 170 Out[4]: [0, 1, 2, 3]
226 171
227 172 In [5]: mec.execute('print "Hello World"')
228 173 Out[5]:
229 174 <Results List>
230 175 [0] In [1]: print "Hello World"
231 176 [0] Out[1]: Hello World
232 177
233 178 [1] In [1]: print "Hello World"
234 179 [1] Out[1]: Hello World
235 180
236 181 [2] In [1]: print "Hello World"
237 182 [2] Out[1]: Hello World
238 183
239 184 [3] In [1]: print "Hello World"
240 185 [3] Out[1]: Hello World
241 186
242 If this works, you are ready to learn more about the :ref:`MultiEngine <parallelmultiengine>` and :ref:`Task <paralleltask>` interfaces to the controller.
187 Remember, a client also needs to present a FURL file to the controller. How does this happen? When a multiengine client is created with no arguments, the client tries to find the corresponding FURL file in the local :file:`~/.ipython/security` directory. If it finds it, you are set. If you have put the FURL file in a different location or it has a different name, create the client like this::
188
189 mec = client.MultiEngineClient('/path/to/my/ipcontroller-mec.furl')
190
191 The same holds true when creating a task client::
192
193 tc = client.TaskClient('/path/to/my/ipcontroller-tc.furl')
194
195 You are now ready to learn more about the :ref:`MultiEngine <parallelmultiengine>` and :ref:`Task <paralleltask>` interfaces to the controller.
196
197 .. note::
198
199 Don't forget that the engine, multiengine client and task client all have
200 *different* FURL files. You must move *each* of these around to an
201 appropriate location so that the engines and clients can use them to
202 connect to the controller.
203
204 .. [Capability] Capability-based security, http://en.wikipedia.org/wiki/Capability-based_security
205
@@ -1,22 +1,157
1 1 .. _parallelmpi:
2 2
3 3 =======================
4 4 Using MPI with IPython
5 5 =======================
6 6
7 The simplest way of getting started with MPI is to install an MPI implementation
8 (we recommend `Open MPI`_) and `mpi4py`_ and then start the engines using the
9 ``mpirun`` command::
10
11 mpirun -n 4 ipengine --mpi=mpi4py
12
13 This will automatically import `mpi4py`_ and make sure that `MPI_Init` is called
14 at the right time. We also have built in support for `PyTrilinos`_, which can be
15 used (assuming `PyTrilinos`_ is installed) by starting the engines with::
16
17 mpirun -n 4 ipengine --mpi=pytrilinos
18
19 .. _MPI: http://www-unix.mcs.anl.gov/mpi/
20 .. _mpi4py: http://mpi4py.scipy.org/
21 .. _Open MPI: http://www.open-mpi.org/
22 .. _PyTrilinos: http://trilinos.sandia.gov/packages/pytrilinos/ No newline at end of file
7 Often, a parallel algorithm will require moving data between the engines. One way of accomplishing this is by doing a pull and then a push using the multiengine client. However, this will be slow, as all the data has to go through the controller to the client and then back through the controller to its final destination.
8
9 A much better way of moving data between engines is to use a message passing library, such as the Message Passing Interface (MPI) [MPI]_. IPython's parallel computing architecture has been designed from the ground up to integrate with MPI. This document describes how to use MPI with IPython.
10
11 Additional installation requirements
12 ====================================
13
14 If you want to use MPI with IPython, you will need to install:
15
16 * A standard MPI implementation such as OpenMPI [OpenMPI]_ or MPICH.
17 * The mpi4py [mpi4py]_ package.
18
19 .. note::
20
21 The mpi4py package is not a strict requirement. However, you need to
22 have *some* way of calling MPI from Python. You also need some way of
23 making sure that :func:`MPI_Init` is called when the IPython engines start
24 up. There are a number of ways of doing this and a good number of
25 associated subtleties. We highly recommend just using mpi4py as it
26 takes care of most of these problems. If you want to do something
27 different, let us know and we can help you get started.
28
29 Starting the engines with MPI enabled
30 =====================================
31
32 To use code that calls MPI, there are typically two things that MPI requires:
33
34 1. The process that wants to call MPI must be started using
35 :command:`mpirun` or a batch system (like PBS) that has MPI support.
36 2. Once the process starts, it must call :func:`MPI_Init`.
37
38 There are a couple of ways that you can start the IPython engines and get these things to happen.
39
40 Automatic starting using :command:`mpirun` and :command:`ipcluster`
41 -------------------------------------------------------------------
42
43 The easiest approach is to use the `mpirun` mode of :command:`ipcluster`, which will first start a controller and then a set of engines using :command:`mpirun`::
44
45 $ ipcluster mpirun -n 4
46
47 This approach is best as interrupting :command:`ipcluster` will automatically
48 stop and clean up the controller and engines.
49
50 Manual starting using :command:`mpirun`
51 ---------------------------------------
52
53 If you want to start the IPython engines using :command:`mpirun`, just do::
54
55 $ mpirun -n 4 ipengine --mpi=mpi4py
56
57 This requires that you already have a controller running and that the FURL
58 files for the engines are in place. We also have built-in support for
59 PyTrilinos [PyTrilinos]_, which can be used (assuming it is installed) by
60 starting the engines with::
61
62 mpirun -n 4 ipengine --mpi=pytrilinos
63
64 Automatic starting using PBS and :command:`ipcluster`
65 -----------------------------------------------------
66
67 The :command:`ipcluster` command also has built-in integration with PBS. For more information on this approach, see our documentation on :ref:`ipcluster <parallel_process>`.
68
69 Actually using MPI
70 ==================
71
72 Once the engines are running with MPI enabled, you are ready to go. You can now call any code that uses MPI in the IPython engines, and all of this can be done interactively. Here we show a simple example that uses mpi4py [mpi4py]_.
73
74 First, let's define a simple function that uses MPI to calculate the sum of a distributed array. Save the following text in a file called :file:`psum.py`:
75
76 .. sourcecode:: python
77
78 from mpi4py import MPI
79 import numpy as np
80
81 def psum(a):
82 s = np.sum(a)
83 return MPI.COMM_WORLD.Allreduce(s,MPI.SUM)
84
85 Now, start an IPython cluster in the same directory as :file:`psum.py`::
86
87 $ ipcluster mpirun -n 4
88
89 Finally, connect to the cluster and use this function interactively. In this case, we create a random array on each engine and sum up all the random arrays using our :func:`psum` function:
90
91 .. sourcecode:: ipython
92
93 In [1]: from IPython.kernel import client
94
95 In [2]: mec = client.MultiEngineClient()
96
97 In [3]: mec.activate()
98
99 In [4]: px import numpy as np
100 Parallel execution on engines: all
101 Out[4]:
102 <Results List>
103 [0] In [13]: import numpy as np
104 [1] In [13]: import numpy as np
105 [2] In [13]: import numpy as np
106 [3] In [13]: import numpy as np
107
108 In [6]: px a = np.random.rand(100)
109 Parallel execution on engines: all
110 Out[6]:
111 <Results List>
112 [0] In [15]: a = np.random.rand(100)
113 [1] In [15]: a = np.random.rand(100)
114 [2] In [15]: a = np.random.rand(100)
115 [3] In [15]: a = np.random.rand(100)
116
117 In [7]: px from psum import psum
118 Parallel execution on engines: all
119 Out[7]:
120 <Results List>
121 [0] In [16]: from psum import psum
122 [1] In [16]: from psum import psum
123 [2] In [16]: from psum import psum
124 [3] In [16]: from psum import psum
125
126 In [8]: px s = psum(a)
127 Parallel execution on engines: all
128 Out[8]:
129 <Results List>
130 [0] In [17]: s = psum(a)
131 [1] In [17]: s = psum(a)
132 [2] In [17]: s = psum(a)
133 [3] In [17]: s = psum(a)
134
135 In [9]: px print s
136 Parallel execution on engines: all
137 Out[9]:
138 <Results List>
139 [0] In [18]: print s
140 [0] Out[18]: 187.451545803
141
142 [1] In [18]: print s
143 [1] Out[18]: 187.451545803
144
145 [2] In [18]: print s
146 [2] Out[18]: 187.451545803
147
148 [3] In [18]: print s
149 [3] Out[18]: 187.451545803
150
151 Any Python code that makes calls to MPI can be used in this manner, including
152 compiled C, C++ and Fortran libraries that have been exposed to Python.
153
154 .. [MPI] Message Passing Interface. http://www-unix.mcs.anl.gov/mpi/
155 .. [mpi4py] MPI for Python. mpi4py: http://mpi4py.scipy.org/
156 .. [OpenMPI] Open MPI. http://www.open-mpi.org/
157 .. [PyTrilinos] PyTrilinos. http://trilinos.sandia.gov/packages/pytrilinos/ No newline at end of file
@@ -1,728 +1,828
1 1 .. _parallelmultiengine:
2 2
3 =================================
4 IPython's MultiEngine interface
5 =================================
6
7 .. contents::
8
9 The MultiEngine interface represents one possible way of working with a
10 set of IPython engines. The basic idea behind the MultiEngine interface is
11 that the capabilities of each engine are explicitly exposed to the user.
12 Thus, in the MultiEngine interface, each engine is given an id that is
13 used to identify the engine and give it work to do. This interface is very
14 intuitive and is designed with interactive usage in mind, and is thus the
15 best place for new users of IPython to begin.
3 ===============================
4 IPython's multiengine interface
5 ===============================
6
7 The multiengine interface represents one possible way of working with a set of
8 IPython engines. The basic idea behind the multiengine interface is that the
9 capabilities of each engine are directly and explicitly exposed to the user.
10 Thus, in the multiengine interface, each engine is given an id that is used to
11 identify the engine and give it work to do. This interface is very intuitive
12 and is designed with interactive usage in mind, and is thus the best place for
13 new users of IPython to begin.
16 14
17 15 Starting the IPython controller and engines
18 16 ===========================================
19 17
20 18 To follow along with this tutorial, you will need to start the IPython
21 controller and four IPython engines. The simplest way of doing this is to
22 use the ``ipcluster`` command::
19 controller and four IPython engines. The simplest way of doing this is to use
20 the :command:`ipcluster` command::
23 21
24 $ ipcluster -n 4
22 $ ipcluster local -n 4
25 23
26 For more detailed information about starting the controller and engines, see our :ref:`introduction <ip1par>` to using IPython for parallel computing.
24 For more detailed information about starting the controller and engines, see
25 our :ref:`introduction <ip1par>` to using IPython for parallel computing.
27 26
28 27 Creating a ``MultiEngineClient`` instance
29 28 =========================================
30 29
31 The first step is to import the IPython ``client`` module and then create a ``MultiEngineClient`` instance::
30 The first step is to import the IPython :mod:`IPython.kernel.client` module
31 and then create a :class:`MultiEngineClient` instance:
32
33 .. sourcecode:: ipython
32 34
33 35 In [1]: from IPython.kernel import client
34 36
35 37 In [2]: mec = client.MultiEngineClient()
36 38
37 To make sure there are engines connected to the controller, use can get a list of engine ids::
39 This form assumes that the :file:`ipcontroller-mec.furl` is in the
40 :file:`~/.ipython/security` directory on the client's host. If not, the
41 location of the FURL file must be given as an argument to the
42 constructor:
43
44 .. sourcecode:: ipython
45
46 In [2]: mec = client.MultiEngineClient('/path/to/my/ipcontroller-mec.furl')
47
48 To make sure there are engines connected to the controller, you can get a list
49 of engine ids:
50
51 .. sourcecode:: ipython
38 52
39 53 In [3]: mec.get_ids()
40 54 Out[3]: [0, 1, 2, 3]
41 55
42 56 Here we see that there are four engines ready to do work for us.
43 57
58 Quick and easy parallelism
59 ==========================
60
61 In many cases, you simply want to apply a Python function to a sequence of objects, but *in parallel*. The multiengine interface provides two simple ways of accomplishing this: a parallel version of :func:`map` and a ``@parallel`` function decorator.
62
63 Parallel map
64 ------------
65
66 Python's builtin :func:`map` function allows a function to be applied to a
67 sequence element-by-element. This type of code is typically trivial to
68 parallelize. In fact, the multiengine interface in IPython already has a
69 parallel version of :meth:`map` that works just like its serial counterpart:
70
71 .. sourcecode:: ipython
72
73 In [63]: serial_result = map(lambda x:x**10, range(32))
74
75 In [64]: parallel_result = mec.map(lambda x:x**10, range(32))
76
77 In [65]: serial_result==parallel_result
78 Out[65]: True
79
80 .. note::
81
82 The multiengine interface version of :meth:`map` does not do any load
83 balancing. For a load-balanced version, see the task interface.
84
85 .. seealso::
86
87 The :meth:`map` method has a number of options that can be controlled by
88 the :meth:`mapper` method. See its docstring for more information.
89
90 Parallel function decorator
91 ---------------------------
92
93 Parallel functions are just like normal functions, but they can be called on sequences and *in parallel*. The multiengine interface provides a decorator that turns any Python function into a parallel function:
94
95 .. sourcecode:: ipython
96
97 In [10]: @mec.parallel()
98 ....: def f(x):
99 ....: return 10.0*x**4
100 ....:
101
102 In [11]: f(range(32)) # this is done in parallel
103 Out[11]:
104 [0.0,10.0,160.0,...]
105
106 See the docstring for the :meth:`parallel` decorator for options.
107
44 108 Running Python commands
45 109 =======================
46 110
47 The most basic type of operation that can be performed on the engines is to execute Python code. Executing Python code can be done in blocking or non-blocking mode (blocking is default) using the ``execute`` method.
111 The most basic type of operation that can be performed on the engines is to
112 execute Python code. Executing Python code can be done in blocking or
113 non-blocking mode (blocking is the default) using the :meth:`execute` method.
48 114
49 115 Blocking execution
50 116 ------------------
51 117
52 In blocking mode, the ``MultiEngineClient`` object (called ``mec`` in
118 In blocking mode, the :class:`MultiEngineClient` object (called ``mec`` in
53 119 these examples) submits the command to the controller, which places the
54 command in the engines' queues for execution. The ``execute`` call then
55 blocks until the engines are done executing the command::
120 command in the engines' queues for execution. The :meth:`execute` call then
121 blocks until the engines are done executing the command:
122
123 .. sourcecode:: ipython
56 124
57 125 # The default is to run on all engines
58 126 In [4]: mec.execute('a=5')
59 127 Out[4]:
60 128 <Results List>
61 129 [0] In [1]: a=5
62 130 [1] In [1]: a=5
63 131 [2] In [1]: a=5
64 132 [3] In [1]: a=5
65 133
66 134 In [5]: mec.execute('b=10')
67 135 Out[5]:
68 136 <Results List>
69 137 [0] In [2]: b=10
70 138 [1] In [2]: b=10
71 139 [2] In [2]: b=10
72 140 [3] In [2]: b=10
73 141
74 Python commands can be executed on specific engines by calling execute using the ``targets`` keyword argument::
142 Python commands can be executed on specific engines by calling :meth:`execute`
143 with the ``targets`` keyword argument:
144
145 .. sourcecode:: ipython
75 146
76 147 In [6]: mec.execute('c=a+b',targets=[0,2])
77 148 Out[6]:
78 149 <Results List>
79 150 [0] In [3]: c=a+b
80 151 [2] In [3]: c=a+b
81 152
82 153
83 154 In [7]: mec.execute('c=a-b',targets=[1,3])
84 155 Out[7]:
85 156 <Results List>
86 157 [1] In [3]: c=a-b
87 158 [3] In [3]: c=a-b
88 159
89 160
90 161 In [8]: mec.execute('print c')
91 162 Out[8]:
92 163 <Results List>
93 164 [0] In [4]: print c
94 165 [0] Out[4]: 15
95 166
96 167 [1] In [4]: print c
97 168 [1] Out[4]: -5
98 169
99 170 [2] In [4]: print c
100 171 [2] Out[4]: 15
101 172
102 173 [3] In [4]: print c
103 174 [3] Out[4]: -5
104 175
105 This example also shows one of the most important things about the IPython engines: they have a persistent user namespaces. The ``execute`` method returns a Python ``dict`` that contains useful information::
176 This example also shows one of the most important things about the IPython
177 engines: they have persistent user namespaces. The :meth:`execute` method
178 returns a Python ``dict`` that contains useful information:
179
180 .. sourcecode:: ipython
106 181
107 182 In [9]: result_dict = mec.execute('d=10; print d')
108 183
109 184 In [10]: for r in result_dict:
110 185 ....: print r
111 186 ....:
112 187 ....:
113 188 {'input': {'translated': 'd=10; print d', 'raw': 'd=10; print d'}, 'number': 5, 'id': 0, 'stdout': '10\n'}
114 189 {'input': {'translated': 'd=10; print d', 'raw': 'd=10; print d'}, 'number': 5, 'id': 1, 'stdout': '10\n'}
115 190 {'input': {'translated': 'd=10; print d', 'raw': 'd=10; print d'}, 'number': 5, 'id': 2, 'stdout': '10\n'}
116 191 {'input': {'translated': 'd=10; print d', 'raw': 'd=10; print d'}, 'number': 5, 'id': 3, 'stdout': '10\n'}
117 192
118 193 Non-blocking execution
119 194 ----------------------
120 195
121 In non-blocking mode, ``execute`` submits the command to be executed and then returns a
122 ``PendingResult`` object immediately. The ``PendingResult`` object gives you a way of getting a
123 result at a later time through its ``get_result`` method or ``r`` attribute. This allows you to
124 quickly submit long running commands without blocking your local Python/IPython session::
196 In non-blocking mode, :meth:`execute` submits the command to be executed and
197 then returns a :class:`PendingResult` object immediately. The
198 :class:`PendingResult` object gives you a way of getting a result at a later
199 time through its :meth:`get_result` method or :attr:`r` attribute. This allows
200 you to quickly submit long running commands without blocking your local
201 Python/IPython session:
202
203 .. sourcecode:: ipython
125 204
126 205 # In blocking mode
127 206 In [6]: mec.execute('import time')
128 207 Out[6]:
129 208 <Results List>
130 209 [0] In [1]: import time
131 210 [1] In [1]: import time
132 211 [2] In [1]: import time
133 212 [3] In [1]: import time
134 213
135 214 # In non-blocking mode
136 215 In [7]: pr = mec.execute('time.sleep(10)',block=False)
137 216
138 217 # Now block for the result
139 218 In [8]: pr.get_result()
140 219 Out[8]:
141 220 <Results List>
142 221 [0] In [2]: time.sleep(10)
143 222 [1] In [2]: time.sleep(10)
144 223 [2] In [2]: time.sleep(10)
145 224 [3] In [2]: time.sleep(10)
146 225
147 226 # Again in non-blocking mode
148 227 In [9]: pr = mec.execute('time.sleep(10)',block=False)
149 228
150 229 # Poll to see if the result is ready
151 230 In [10]: pr.get_result(block=False)
152 231
153 232 # A shorthand for get_result(block=True)
154 233 In [11]: pr.r
155 234 Out[11]:
156 235 <Results List>
157 236 [0] In [3]: time.sleep(10)
158 237 [1] In [3]: time.sleep(10)
159 238 [2] In [3]: time.sleep(10)
160 239 [3] In [3]: time.sleep(10)
161 240
162 Often, it is desirable to wait until a set of ``PendingResult`` objects are done. For this, there is a the method ``barrier``. This method takes a tuple of ``PendingResult`` objects and blocks until all of the associated results are ready::
241 Often, it is desirable to wait until a set of :class:`PendingResult` objects
242 are done. For this, there is the :meth:`barrier` method. This method takes a
243 tuple of :class:`PendingResult` objects and blocks until all of the associated
244 results are ready:
245
246 .. sourcecode:: ipython
163 247
164 248 In [72]: mec.block=False
165 249
166 250 # A trivial list of PendingResults objects
167 251 In [73]: pr_list = [mec.execute('time.sleep(3)') for i in range(10)]
168 252
169 253 # Wait until all of them are done
170 254 In [74]: mec.barrier(pr_list)
171 255
172 256 # Then, their results are ready using get_result or the r attribute
173 257 In [75]: pr_list[0].r
174 258 Out[75]:
175 259 <Results List>
176 260 [0] In [20]: time.sleep(3)
177 261 [1] In [19]: time.sleep(3)
178 262 [2] In [20]: time.sleep(3)
179 263 [3] In [19]: time.sleep(3)
180 264
181 265
182 266 The ``block`` and ``targets`` keyword arguments and attributes
183 267 --------------------------------------------------------------
184 268
185 Most commands in the multiengine interface (like ``execute``) accept ``block`` and ``targets``
186 as keyword arguments. As we have seen above, these keyword arguments control the blocking mode
187 and which engines the command is applied to. The ``MultiEngineClient`` class also has ``block``
188 and ``targets`` attributes that control the default behavior when the keyword arguments are not
189 provided. Thus the following logic is used for ``block`` and ``targets``:
269 Most methods in the multiengine interface (like :meth:`execute`) accept
270 ``block`` and ``targets`` as keyword arguments. As we have seen above, these
271 keyword arguments control the blocking mode and which engines the command is
272 applied to. The :class:`MultiEngineClient` class also has :attr:`block` and
273 :attr:`targets` attributes that control the default behavior when the keyword
274 arguments are not provided. Thus the following logic is used for :attr:`block`
275 and :attr:`targets`:
276
277 * If no keyword argument is provided, the instance attributes are used.
278 * Keyword arguments, if provided, override the instance attributes.
190 279
191 * If no keyword argument is provided, the instance attributes are used.
192 * Keyword argument, if provided override the instance attributes.
280 The following examples demonstrate how to use the instance attributes:
193 281
194 The following examples demonstrate how to use the instance attributes::
282 .. sourcecode:: ipython
195 283
196 284 In [16]: mec.targets = [0,2]
197 285
198 286 In [17]: mec.block = False
199 287
200 288 In [18]: pr = mec.execute('a=5')
201 289
202 290 In [19]: pr.r
203 291 Out[19]:
204 292 <Results List>
205 293 [0] In [6]: a=5
206 294 [2] In [6]: a=5
207 295
208 296 # Note targets='all' means all engines
209 297 In [20]: mec.targets = 'all'
210 298
211 299 In [21]: mec.block = True
212 300
213 301 In [22]: mec.execute('b=10; print b')
214 302 Out[22]:
215 303 <Results List>
216 304 [0] In [7]: b=10; print b
217 305 [0] Out[7]: 10
218 306
219 307 [1] In [6]: b=10; print b
220 308 [1] Out[6]: 10
221 309
222 310 [2] In [7]: b=10; print b
223 311 [2] Out[7]: 10
224 312
225 313 [3] In [6]: b=10; print b
226 314 [3] Out[6]: 10
227 315
228 The ``block`` and ``targets`` instance attributes also determine the behavior of the parallel
229 magic commands...
316 The :attr:`block` and :attr:`targets` instance attributes also determine the
317 behavior of the parallel magic commands.
230 318
231 319
232 320 Parallel magic commands
233 321 -----------------------
234 322
235 We provide a few IPython magic commands (``%px``, ``%autopx`` and ``%result``) that make it more pleasant to execute Python commands on the engines interactively. These are simply shortcuts to ``execute`` and ``get_result``. The ``%px`` magic executes a single Python command on the engines specified by the `magicTargets``targets` attribute of the ``MultiEngineClient`` instance (by default this is 'all')::
323 We provide a few IPython magic commands (``%px``, ``%autopx`` and ``%result``)
324 that make it more pleasant to execute Python commands on the engines
325 interactively. These are simply shortcuts to :meth:`execute` and
326 :meth:`get_result`. The ``%px`` magic executes a single Python command on the
327 engines specified by the :attr:`targets` attribute of the
328 :class:`MultiEngineClient` instance (by default this is ``'all'``):
329
330 .. sourcecode:: ipython
236 331
237 332 # Make this MultiEngineClient active for parallel magic commands
238 333 In [23]: mec.activate()
239 334
240 335 In [24]: mec.block=True
241 336
242 337 In [25]: import numpy
243 338
244 339 In [26]: %px import numpy
245 340 Executing command on Controller
246 341 Out[26]:
247 342 <Results List>
248 343 [0] In [8]: import numpy
249 344 [1] In [7]: import numpy
250 345 [2] In [8]: import numpy
251 346 [3] In [7]: import numpy
252 347
253 348
254 349 In [27]: %px a = numpy.random.rand(2,2)
255 350 Executing command on Controller
256 351 Out[27]:
257 352 <Results List>
258 353 [0] In [9]: a = numpy.random.rand(2,2)
259 354 [1] In [8]: a = numpy.random.rand(2,2)
260 355 [2] In [9]: a = numpy.random.rand(2,2)
261 356 [3] In [8]: a = numpy.random.rand(2,2)
262 357
263 358
264 359 In [28]: %px print numpy.linalg.eigvals(a)
265 360 Executing command on Controller
266 361 Out[28]:
267 362 <Results List>
268 363 [0] In [10]: print numpy.linalg.eigvals(a)
269 364 [0] Out[10]: [ 1.28167017 0.14197338]
270 365
271 366 [1] In [9]: print numpy.linalg.eigvals(a)
272 367 [1] Out[9]: [-0.14093616 1.27877273]
273 368
274 369 [2] In [10]: print numpy.linalg.eigvals(a)
275 370 [2] Out[10]: [-0.37023573 1.06779409]
276 371
277 372 [3] In [9]: print numpy.linalg.eigvals(a)
278 373 [3] Out[9]: [ 0.83664764 -0.25602658]
279 374
280 The ``%result`` magic gets and prints the stdin/stdout/stderr of the last command executed on each engine. It is simply a shortcut to the ``get_result`` method::
375 The ``%result`` magic gets and prints the stdin/stdout/stderr of the last
376 command executed on each engine. It is simply a shortcut to the
377 :meth:`get_result` method:
378
379 .. sourcecode:: ipython
281 380
282 381 In [29]: %result
283 382 Out[29]:
284 383 <Results List>
285 384 [0] In [10]: print numpy.linalg.eigvals(a)
286 385 [0] Out[10]: [ 1.28167017 0.14197338]
287 386
288 387 [1] In [9]: print numpy.linalg.eigvals(a)
289 388 [1] Out[9]: [-0.14093616 1.27877273]
290 389
291 390 [2] In [10]: print numpy.linalg.eigvals(a)
292 391 [2] Out[10]: [-0.37023573 1.06779409]
293 392
294 393 [3] In [9]: print numpy.linalg.eigvals(a)
295 394 [3] Out[9]: [ 0.83664764 -0.25602658]
296 395
297 The ``%autopx`` magic switches to a mode where everything you type is executed on the engines given by the ``targets`` attribute::
396 The ``%autopx`` magic switches to a mode where everything you type is executed
397 on the engines given by the :attr:`targets` attribute:
398
399 .. sourcecode:: ipython
298 400
299 401 In [30]: mec.block=False
300 402
301 403 In [31]: %autopx
302 404 Auto Parallel Enabled
303 405 Type %autopx to disable
304 406
305 407 In [32]: max_evals = []
306 408 <IPython.kernel.multiengineclient.PendingResult object at 0x17b8a70>
307 409
308 410 In [33]: for i in range(100):
309 411 ....: a = numpy.random.rand(10,10)
310 412 ....: a = a+a.transpose()
311 413 ....: evals = numpy.linalg.eigvals(a)
312 414 ....: max_evals.append(evals[0].real)
313 415 ....:
314 416 ....:
315 417 <IPython.kernel.multiengineclient.PendingResult object at 0x17af8f0>
316 418
317 419 In [34]: %autopx
318 420 Auto Parallel Disabled
319 421
320 422 In [35]: mec.block=True
321 423
322 424 In [36]: px print "Average max eigenvalue is: ", sum(max_evals)/len(max_evals)
323 425 Executing command on Controller
324 426 Out[36]:
325 427 <Results List>
326 428 [0] In [13]: print "Average max eigenvalue is: ", sum(max_evals)/len(max_evals)
327 429 [0] Out[13]: Average max eigenvalue is: 10.1387247332
328 430
329 431 [1] In [12]: print "Average max eigenvalue is: ", sum(max_evals)/len(max_evals)
330 432 [1] Out[12]: Average max eigenvalue is: 10.2076902286
331 433
332 434 [2] In [13]: print "Average max eigenvalue is: ", sum(max_evals)/len(max_evals)
333 435 [2] Out[13]: Average max eigenvalue is: 10.1891484655
334 436
335 437 [3] In [12]: print "Average max eigenvalue is: ", sum(max_evals)/len(max_evals)
336 438 [3] Out[12]: Average max eigenvalue is: 10.1158837784
337 439
338 Using the ``with`` statement of Python 2.5
339 ------------------------------------------
340
341 Python 2.5 introduced the ``with`` statement. The ``MultiEngineClient`` can be used with the ``with`` statement to execute a block of code on the engines indicated by the ``targets`` attribute::
342
343 In [3]: with mec:
344 ...: client.remote() # Required so the following code is not run locally
345 ...: a = 10
346 ...: b = 30
347 ...: c = a+b
348 ...:
349 ...:
350
351 In [4]: mec.get_result()
352 Out[4]:
353 <Results List>
354 [0] In [1]: a = 10
355 b = 30
356 c = a+b
357
358 [1] In [1]: a = 10
359 b = 30
360 c = a+b
361
362 [2] In [1]: a = 10
363 b = 30
364 c = a+b
365 440
366 [3] In [1]: a = 10
367 b = 30
368 c = a+b
441 Moving Python objects around
442 ============================
369 443
370 This is basically another way of calling execute, but one with allows you to avoid writing code in strings. When used in this way, the attributes ``targets`` and ``block`` are used to control how the code is executed. For now, if you run code in non-blocking mode you won't have access to the ``PendingResult``.
371
372 Moving Python object around
373 ===========================
374
375 In addition to executing code on engines, you can transfer Python objects to and from your
376 IPython session and the engines. In IPython, these operations are called ``push`` (sending an
377 object to the engines) and ``pull`` (getting an object from the engines).
444 In addition to executing code on engines, you can transfer Python objects to
445 and from your IPython session and the engines. In IPython, these operations
446 are called :meth:`push` (sending an object to the engines) and :meth:`pull`
447 (getting an object from the engines).
378 448
379 449 Basic push and pull
380 450 -------------------
381 451
382 Here are some examples of how you use ``push`` and ``pull``::
452 Here are some examples of how you use :meth:`push` and :meth:`pull`:
453
454 .. sourcecode:: ipython
383 455
384 456 In [38]: mec.push(dict(a=1.03234,b=3453))
385 457 Out[38]: [None, None, None, None]
386 458
387 459 In [39]: mec.pull('a')
388 460 Out[39]: [1.03234, 1.03234, 1.03234, 1.03234]
389 461
390 462 In [40]: mec.pull('b',targets=0)
391 463 Out[40]: [3453]
392 464
393 465 In [41]: mec.pull(('a','b'))
394 466 Out[41]: [[1.03234, 3453], [1.03234, 3453], [1.03234, 3453], [1.03234, 3453]]
395 467
396 468 In [42]: mec.zip_pull(('a','b'))
397 469 Out[42]: [(1.03234, 1.03234, 1.03234, 1.03234), (3453, 3453, 3453, 3453)]
398 470
399 471 In [43]: mec.push(dict(c='speed'))
400 472 Out[43]: [None, None, None, None]
401 473
402 474 In [44]: %px print c
403 475 Executing command on Controller
404 476 Out[44]:
405 477 <Results List>
406 478 [0] In [14]: print c
407 479 [0] Out[14]: speed
408 480
409 481 [1] In [13]: print c
410 482 [1] Out[13]: speed
411 483
412 484 [2] In [14]: print c
413 485 [2] Out[14]: speed
414 486
415 487 [3] In [13]: print c
416 488 [3] Out[13]: speed
417 489
418 In non-blocking mode ``push`` and ``pull`` also return ``PendingResult`` objects::
490 In non-blocking mode :meth:`push` and :meth:`pull` also return
491 :class:`PendingResult` objects:
492
493 .. sourcecode:: ipython
419 494
420 495 In [47]: mec.block=False
421 496
422 497 In [48]: pr = mec.pull('a')
423 498
424 499 In [49]: pr.r
425 500 Out[49]: [1.03234, 1.03234, 1.03234, 1.03234]
426 501
427 502
428 503 Push and pull for functions
429 504 ---------------------------
430 505
431 Functions can also be pushed and pulled using ``push_function`` and ``pull_function``::
506 Functions can also be pushed and pulled using :meth:`push_function` and
507 :meth:`pull_function`:
508
509 .. sourcecode:: ipython
510
511 In [52]: mec.block=True
432 512
433 513 In [53]: def f(x):
434 514 ....: return 2.0*x**4
435 515 ....:
436 516
437 517 In [54]: mec.push_function(dict(f=f))
438 518 Out[54]: [None, None, None, None]
439 519
440 520 In [55]: mec.execute('y = f(4.0)')
441 521 Out[55]:
442 522 <Results List>
443 523 [0] In [15]: y = f(4.0)
444 524 [1] In [14]: y = f(4.0)
445 525 [2] In [15]: y = f(4.0)
446 526 [3] In [14]: y = f(4.0)
447 527
448 528
449 529 In [56]: px print y
450 530 Executing command on Controller
451 531 Out[56]:
452 532 <Results List>
453 533 [0] In [16]: print y
454 534 [0] Out[16]: 512.0
455 535
456 536 [1] In [15]: print y
457 537 [1] Out[15]: 512.0
458 538
459 539 [2] In [16]: print y
460 540 [2] Out[16]: 512.0
461 541
462 542 [3] In [15]: print y
463 543 [3] Out[15]: 512.0
464 544
465 545
466 546 Dictionary interface
467 547 --------------------
468 548
469 As a shorthand to ``push`` and ``pull``, the ``MultiEngineClient`` class implements some of the Python dictionary interface. This make the remote namespaces of the engines appear as a local dictionary. Underneath, this uses ``push`` and ``pull``::
549 As a shorthand to :meth:`push` and :meth:`pull`, the
550 :class:`MultiEngineClient` class implements some of the Python dictionary
551 interface. This makes the remote namespaces of the engines appear as a local
552 dictionary. Underneath, this uses :meth:`push` and :meth:`pull`:
553
554 .. sourcecode:: ipython
470 555
471 556 In [50]: mec.block=True
472 557
473 558 In [51]: mec['a']=['foo','bar']
474 559
475 560 In [52]: mec['a']
476 561 Out[52]: [['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar'], ['foo', 'bar']]
477 562
478 563 Scatter and gather
479 564 ------------------
480 565
481 Sometimes it is useful to partition a sequence and push the partitions to different engines. In
482 MPI language, this is know as scatter/gather and we follow that terminology. However, it is
483 important to remember that in IPython ``scatter`` is from the interactive IPython session to
484 the engines and ``gather`` is from the engines back to the interactive IPython session. For
485 scatter/gather operations between engines, MPI should be used::
566 Sometimes it is useful to partition a sequence and push the partitions to
567 different engines. In MPI language, this is known as scatter/gather and we
568 follow that terminology. However, it is important to remember that in
569 IPython's :class:`MultiEngineClient` class, :meth:`scatter` is from the
570 interactive IPython session to the engines and :meth:`gather` is from the
571 engines back to the interactive IPython session. For scatter/gather operations
572 between engines, MPI should be used:
573
574 .. sourcecode:: ipython
486 575
487 576 In [58]: mec.scatter('a',range(16))
488 577 Out[58]: [None, None, None, None]
489 578
490 579 In [59]: px print a
491 580 Executing command on Controller
492 581 Out[59]:
493 582 <Results List>
494 583 [0] In [17]: print a
495 584 [0] Out[17]: [0, 1, 2, 3]
496 585
497 586 [1] In [16]: print a
498 587 [1] Out[16]: [4, 5, 6, 7]
499 588
500 589 [2] In [17]: print a
501 590 [2] Out[17]: [8, 9, 10, 11]
502 591
503 592 [3] In [16]: print a
504 593 [3] Out[16]: [12, 13, 14, 15]
505 594
506 595
507 596 In [60]: mec.gather('a')
508 597 Out[60]: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
509 598
510 599 Other things to look at
511 600 =======================
512 601
513 Parallel map
514 ------------
515
516 Python's builtin ``map`` functions allows a function to be applied to a sequence element-by-element. This type of code is typically trivial to parallelize. In fact, the MultiEngine interface in IPython already has a parallel version of ``map`` that works just like its serial counterpart::
517
518 In [63]: serial_result = map(lambda x:x**10, range(32))
519
520 In [64]: parallel_result = mec.map(lambda x:x**10, range(32))
521
522 In [65]: serial_result==parallel_result
523 Out[65]: True
524
525 As you would expect, the parallel version of ``map`` is also influenced by the ``block`` and ``targets`` keyword arguments and attributes.
526
527 602 How to do parallel list comprehensions
528 603 --------------------------------------
529 604
530 In many cases list comprehensions are nicer than using the map function. While we don't have fully parallel list comprehensions, it is simple to get the basic effect using ``scatter`` and ``gather``::
605 In many cases list comprehensions are nicer than using the map function. While
606 we don't have fully parallel list comprehensions, it is simple to get the
607 basic effect using :meth:`scatter` and :meth:`gather`:
608
609 .. sourcecode:: ipython
531 610
532 611 In [66]: mec.scatter('x',range(64))
533 612 Out[66]: [None, None, None, None]
534 613
535 614 In [67]: px y = [i**10 for i in x]
536 615 Executing command on Controller
537 616 Out[67]:
538 617 <Results List>
539 618 [0] In [19]: y = [i**10 for i in x]
540 619 [1] In [18]: y = [i**10 for i in x]
541 620 [2] In [19]: y = [i**10 for i in x]
542 621 [3] In [18]: y = [i**10 for i in x]
543 622
544 623
545 624 In [68]: y = mec.gather('y')
546 625
547 626 In [69]: print y
548 627 [0, 1, 1024, 59049, 1048576, 9765625, 60466176, 282475249, 1073741824,...]
549 628
550 Parallel Exceptions
629 Parallel exceptions
551 630 -------------------
552 631
553 In the MultiEngine interface, parallel commands can raise Python exceptions, just like serial commands. But, it is a little subtle, because a single parallel command can actually raise multiple exceptions (one for each engine the command was run on). To express this idea, the MultiEngine interface has a ``CompositeError`` exception class that will be raised in most cases. The ``CompositeError`` class is a special type of exception that wraps one or more other types of exceptions. Here is how it works::
632 In the multiengine interface, parallel commands can raise Python exceptions,
633 just like serial commands. But it is a little subtle, because a single
634 parallel command can actually raise multiple exceptions (one for each engine
635 the command was run on). To express this idea, the multiengine interface has a
636 :exc:`CompositeError` exception class that will be raised in most cases. The
637 :exc:`CompositeError` class is a special type of exception that wraps one or
638 more other types of exceptions. Here is how it works:
639
640 .. sourcecode:: ipython
554 641
555 642 In [76]: mec.block=True
556 643
557 644 In [77]: mec.execute('1/0')
558 645 ---------------------------------------------------------------------------
559 646 CompositeError Traceback (most recent call last)
560 647
561 648 /ipython1-client-r3021/docs/examples/<ipython console> in <module>()
562 649
563 650 /ipython1-client-r3021/ipython1/kernel/multiengineclient.pyc in execute(self, lines, targets, block)
564 651 432 targets, block = self._findTargetsAndBlock(targets, block)
565 652 433 result = blockingCallFromThread(self.smultiengine.execute, lines,
566 653 --> 434 targets=targets, block=block)
567 654 435 if block:
568 655 436 result = ResultList(result)
569 656
570 657 /ipython1-client-r3021/ipython1/kernel/twistedutil.pyc in blockingCallFromThread(f, *a, **kw)
571 658 72 result.raiseException()
572 659 73 except Exception, e:
573 660 ---> 74 raise e
574 661 75 return result
575 662 76
576 663
577 664 CompositeError: one or more exceptions from call to method: execute
578 665 [0:execute]: ZeroDivisionError: integer division or modulo by zero
579 666 [1:execute]: ZeroDivisionError: integer division or modulo by zero
580 667 [2:execute]: ZeroDivisionError: integer division or modulo by zero
581 668 [3:execute]: ZeroDivisionError: integer division or modulo by zero
582 669
583 Notice how the error message printed when ``CompositeError`` is raised has information about the individual exceptions that were raised on each engine. If you want, you can even raise one of these original exceptions::
670 Notice how the error message printed when :exc:`CompositeError` is raised has information about the individual exceptions that were raised on each engine. If you want, you can even raise one of these original exceptions:
671
672 .. sourcecode:: ipython
584 673
585 674 In [80]: try:
586 675 ....: mec.execute('1/0')
587 676 ....: except client.CompositeError, e:
588 677 ....: e.raise_exception()
589 678 ....:
590 679 ....:
591 680 ---------------------------------------------------------------------------
592 681 ZeroDivisionError Traceback (most recent call last)
593 682
594 683 /ipython1-client-r3021/docs/examples/<ipython console> in <module>()
595 684
596 685 /ipython1-client-r3021/ipython1/kernel/error.pyc in raise_exception(self, excid)
597 686 156 raise IndexError("an exception with index %i does not exist"%excid)
598 687 157 else:
599 688 --> 158 raise et, ev, etb
600 689 159
601 690 160 def collect_exceptions(rlist, method):
602 691
603 692 ZeroDivisionError: integer division or modulo by zero
604 693
605 If you are working in IPython, you can simple type ``%debug`` after one of these ``CompositeError`` is raised, and inspect the exception instance::
694 If you are working in IPython, you can simply type ``%debug`` after one of
695 these :exc:`CompositeError` exceptions is raised, and inspect the exception
696 instance:
697
698 .. sourcecode:: ipython
606 699
607 700 In [81]: mec.execute('1/0')
608 701 ---------------------------------------------------------------------------
609 702 CompositeError Traceback (most recent call last)
610 703
611 704 /ipython1-client-r3021/docs/examples/<ipython console> in <module>()
612 705
613 706 /ipython1-client-r3021/ipython1/kernel/multiengineclient.pyc in execute(self, lines, targets, block)
614 707 432 targets, block = self._findTargetsAndBlock(targets, block)
615 708 433 result = blockingCallFromThread(self.smultiengine.execute, lines,
616 709 --> 434 targets=targets, block=block)
617 710 435 if block:
618 711 436 result = ResultList(result)
619 712
620 713 /ipython1-client-r3021/ipython1/kernel/twistedutil.pyc in blockingCallFromThread(f, *a, **kw)
621 714 72 result.raiseException()
622 715 73 except Exception, e:
623 716 ---> 74 raise e
624 717 75 return result
625 718 76
626 719
627 720 CompositeError: one or more exceptions from call to method: execute
628 721 [0:execute]: ZeroDivisionError: integer division or modulo by zero
629 722 [1:execute]: ZeroDivisionError: integer division or modulo by zero
630 723 [2:execute]: ZeroDivisionError: integer division or modulo by zero
631 724 [3:execute]: ZeroDivisionError: integer division or modulo by zero
632 725
633 726 In [82]: %debug
634 727 >
635 728
636 729 /ipython1-client-r3021/ipython1/kernel/twistedutil.py(74)blockingCallFromThread()
637 730 73 except Exception, e:
638 731 ---> 74 raise e
639 732 75 return result
640 733
641 734 # With the debugger running, e is the exceptions instance. We can tab complete
642 735 # on it and see the extra methods that are available.
643 736 ipdb> e.
644 737 e.__class__ e.__getitem__ e.__new__ e.__setstate__ e.args
645 738 e.__delattr__ e.__getslice__ e.__reduce__ e.__str__ e.elist
646 739 e.__dict__ e.__hash__ e.__reduce_ex__ e.__weakref__ e.message
647 740 e.__doc__ e.__init__ e.__repr__ e._get_engine_str e.print_tracebacks
648 741 e.__getattribute__ e.__module__ e.__setattr__ e._get_traceback e.raise_exception
649 742 ipdb> e.print_tracebacks()
650 743 [0:execute]:
651 744 ---------------------------------------------------------------------------
652 745 ZeroDivisionError Traceback (most recent call last)
653 746
654 747 /ipython1-client-r3021/docs/examples/<string> in <module>()
655 748
656 749 ZeroDivisionError: integer division or modulo by zero
657 750
658 751 [1:execute]:
659 752 ---------------------------------------------------------------------------
660 753 ZeroDivisionError Traceback (most recent call last)
661 754
662 755 /ipython1-client-r3021/docs/examples/<string> in <module>()
663 756
664 757 ZeroDivisionError: integer division or modulo by zero
665 758
666 759 [2:execute]:
667 760 ---------------------------------------------------------------------------
668 761 ZeroDivisionError Traceback (most recent call last)
669 762
670 763 /ipython1-client-r3021/docs/examples/<string> in <module>()
671 764
672 765 ZeroDivisionError: integer division or modulo by zero
673 766
674 767 [3:execute]:
675 768 ---------------------------------------------------------------------------
676 769 ZeroDivisionError Traceback (most recent call last)
677 770
678 771 /ipython1-client-r3021/docs/examples/<string> in <module>()
679 772
680 773 ZeroDivisionError: integer division or modulo by zero
681 774
682 All of this same error handling magic even works in non-blocking mode::
775 .. note::
776
777 The above example appears to be broken right now because of a change in
778 how we are using Twisted.
779
780 All of this same error handling magic even works in non-blocking mode:
781
782 .. sourcecode:: ipython
683 783
684 784 In [83]: mec.block=False
685 785
686 786 In [84]: pr = mec.execute('1/0')
687 787
688 788 In [85]: pr.r
689 789 ---------------------------------------------------------------------------
690 790 CompositeError Traceback (most recent call last)
691 791
692 792 /ipython1-client-r3021/docs/examples/<ipython console> in <module>()
693 793
694 794 /ipython1-client-r3021/ipython1/kernel/multiengineclient.pyc in _get_r(self)
695 795 170
696 796 171 def _get_r(self):
697 797 --> 172 return self.get_result(block=True)
698 798 173
699 799 174 r = property(_get_r)
700 800
701 801 /ipython1-client-r3021/ipython1/kernel/multiengineclient.pyc in get_result(self, default, block)
702 802 131 return self.result
703 803 132 try:
704 804 --> 133 result = self.client.get_pending_deferred(self.result_id, block)
705 805 134 except error.ResultNotCompleted:
706 806 135 return default
707 807
708 808 /ipython1-client-r3021/ipython1/kernel/multiengineclient.pyc in get_pending_deferred(self, deferredID, block)
709 809 385
710 810 386 def get_pending_deferred(self, deferredID, block):
711 811 --> 387 return blockingCallFromThread(self.smultiengine.get_pending_deferred, deferredID, block)
712 812 388
713 813 389 def barrier(self, pendingResults):
714 814
715 815 /ipython1-client-r3021/ipython1/kernel/twistedutil.pyc in blockingCallFromThread(f, *a, **kw)
716 816 72 result.raiseException()
717 817 73 except Exception, e:
718 818 ---> 74 raise e
719 819 75 return result
720 820 76
721 821
722 822 CompositeError: one or more exceptions from call to method: execute
723 823 [0:execute]: ZeroDivisionError: integer division or modulo by zero
724 824 [1:execute]: ZeroDivisionError: integer division or modulo by zero
725 825 [2:execute]: ZeroDivisionError: integer division or modulo by zero
726 826 [3:execute]: ZeroDivisionError: integer division or modulo by zero
727 827
728 828
@@ -1,240 +1,99
1 1 .. _paralleltask:
2 2
3 =================================
4 The IPython Task interface
5 =================================
3 ==========================
4 The IPython task interface
5 ==========================
6 6
7 .. contents::
7 The task interface to the controller presents the engines as a fault tolerant, dynamically load-balanced system of workers. Unlike the multiengine interface, in the task interface the user has no direct access to individual engines. In some ways, this interface is simpler, but in other ways it is more powerful.
8 8
9 The ``Task`` interface to the controller presents the engines as a fault tolerant, dynamic load-balanced system or workers. Unlike the ``MultiEngine`` interface, in the ``Task`` interface, the user have no direct access to individual engines. In some ways, this interface is simpler, but in other ways it is more powerful. Best of all the user can use both of these interfaces at the same time to take advantage or both of their strengths. When the user can break up the user's work into segments that do not depend on previous execution, the ``Task`` interface is ideal. But it also has more power and flexibility, allowing the user to guide the distribution of jobs, without having to assign Tasks to engines explicitly.
9 Best of all, the user can use both of these interfaces at the same time to take advantage of both of their strengths. When the user can break up their work into segments that do not depend on previous execution, the task interface is ideal. But it also has more power and flexibility, allowing the user to guide the distribution of jobs without having to assign tasks to engines explicitly.
10 10
11 11 Starting the IPython controller and engines
12 12 ===========================================
13 13
14 To follow along with this tutorial, the user will need to start the IPython
15 controller and four IPython engines. The simplest way of doing this is to
16 use the ``ipcluster`` command::
14 To follow along with this tutorial, you will need to start the IPython
15 controller and four IPython engines. The simplest way of doing this is to use
16 the :command:`ipcluster` command::
17 17
18 $ ipcluster -n 4
18 $ ipcluster local -n 4
19 19
20 For more detailed information about starting the controller and engines, see our :ref:`introduction <ip1par>` to using IPython for parallel computing.
20 For more detailed information about starting the controller and engines, see
21 our :ref:`introduction <ip1par>` to using IPython for parallel computing.
21 22
22 The magic here is that this single controller and set of engines is running both the MultiEngine and ``Task`` interfaces simultaneously.
23 Creating a ``TaskClient`` instance
24 =========================================
23 25
24 QuickStart Task Farming
25 =======================
26 The first step is to import the IPython :mod:`IPython.kernel.client` module
27 and then create a :class:`TaskClient` instance:
26 28
27 First, a quick example of how to start running the most basic Tasks.
28 The first step is to import the IPython ``client`` module and then create a ``TaskClient`` instance::
29
30 In [1]: from IPython.kernel import client
31
32 In [2]: tc = client.TaskClient()
29 .. sourcecode:: ipython
33 30
34 Then the user wrap the commands the user want to run in Tasks::
31 In [1]: from IPython.kernel import client
32
33 In [2]: tc = client.TaskClient()
34
35 This form assumes that the :file:`ipcontroller-tc.furl` file is in the
36 :file:`~/.ipython/security` directory on the client's host. If not, the
37 location of the FURL file must be given as an argument to the
38 constructor:
39
40 .. sourcecode:: ipython
41
42 In [2]: tc = client.TaskClient('/path/to/my/ipcontroller-tc.furl')
43
44 Quick and easy parallelism
45 ==========================
46
47 In many cases, you simply want to apply a Python function to a sequence of objects, but *in parallel*. Like the multiengine interface, the task interface provides two simple ways of accomplishing this: a parallel version of :func:`map` and a ``@parallel`` function decorator. However, the versions in the task interface have one important difference: they are dynamically load balanced. Thus, if the execution time per item varies significantly, you should use the versions in the task interface.
48
49 Parallel map
50 ------------
51
52 The parallel :meth:`map` in the task interface is similar to that in the multiengine interface:
53
54 .. sourcecode:: ipython
55
56 In [63]: serial_result = map(lambda x:x**10, range(32))
57
58 In [64]: parallel_result = tc.map(lambda x:x**10, range(32))
59
60 In [65]: serial_result==parallel_result
61 Out[65]: True
62
63 Parallel function decorator
64 ---------------------------
65
66 Parallel functions are just like normal functions, but they can be called on sequences and *in parallel*. The task interface provides a decorator that turns any Python function into a parallel function:
67
68 .. sourcecode:: ipython
69
70 In [10]: @tc.parallel()
71 ....: def f(x):
72 ....: return 10.0*x**4
73 ....:
74
75 In [11]: f(range(32)) # this is done in parallel
76 Out[11]:
77 [0.0,10.0,160.0,...]
78
79 More details
80 ============
81
82 The :class:`TaskClient` has many more powerful features that allow quite a bit of flexibility in how tasks are defined and run. The next places to look are in the following classes:
35 83
36 In [3]: tasklist = []
37 In [4]: for n in range(1000):
38 ... tasklist.append(client.Task("a = %i"%n, pull="a"))
84 * :class:`IPython.kernel.client.TaskClient`
85 * :class:`IPython.kernel.client.StringTask`
86 * :class:`IPython.kernel.client.MapTask`
39 87
40 The first argument of the ``Task`` constructor is a string, the command to be executed. The most important optional keyword argument is ``pull``, which can be a string or list of strings, and it specifies the variable names to be saved as results of the ``Task``.
88 The following is an overview of how to use these classes together:
41 89
42 Next, the user need to submit the Tasks to the ``TaskController`` with the ``TaskClient``::
90 1. Create a :class:`TaskClient`.
91 2. Create one or more instances of :class:`StringTask` or :class:`MapTask`
92 to define your tasks.
93 3. Submit your tasks using the :meth:`run` method of your
94 :class:`TaskClient` instance.
95 4. Use :meth:`TaskClient.get_task_result` to get the results of the
96 tasks.
43 97
44 In [5]: taskids = [ tc.run(t) for t in tasklist ]
98 We are in the process of developing more detailed information about the task interface. For now, the docstrings of the :class:`TaskClient`, :class:`StringTask` and :class:`MapTask` classes should be consulted.
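The following is a minimal, illustrative sketch of that workflow. It assumes
that :class:`StringTask` accepts a code string together with a ``pull``
keyword (as the older ``Task`` class did) and that :class:`MapTask` wraps a
callable plus an ``args`` sequence; both assumptions should be checked against
the docstrings mentioned above:

.. sourcecode:: ipython

    In [3]: from IPython.kernel import client

    In [4]: tc = client.TaskClient()

    # A code-string task: run some code on a worker and pull 'a' back
    In [5]: st = client.StringTask('a = 2**10', pull='a')

    # A function task: apply a callable to the given arguments
    In [6]: mt = client.MapTask(lambda x: x**2, args=(7,))

    # Submit the tasks; run() returns a task id for each
    In [7]: tids = [tc.run(st), tc.run(mt)]

    # Block until the submitted tasks have completed
    In [8]: tc.barrier(tids)

    # Retrieve the results by task id
    In [9]: results = [tc.get_task_result(tid) for tid in tids]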
45 99
46 This will give the user a list of the TaskIDs used by the controller to keep track of the Tasks and their results. Now at some point the user are going to want to get those results back. The ``barrier`` method allows the user to wait for the Tasks to finish running::
47
48 In [6]: tc.barrier(taskids)
49
50 This command will block until all the Tasks in ``taskids`` have finished. Now, the user probably want to look at the user's results::
51
52 In [7]: task_results = [ tc.get_task_result(taskid) for taskid in taskids ]
53
54 Now the user have a list of ``TaskResult`` objects, which have the actual result as a dictionary, but also keep track of some useful metadata about the ``Task``::
55
56 In [8]: tr = ``Task``_results[73]
57
58 In [9]: tr
59 Out[9]: ``TaskResult``[ID:73]:{'a':73}
60
61 In [10]: tr.engineid
62 Out[10]: 1
63
64 In [11]: tr.submitted, tr.completed, tr.duration
65 Out[11]: ("2008/03/08 03:41:42", "2008/03/08 03:41:44", 2.12345)
66
67 The actual results are stored in a dictionary, ``tr.results``, and a namespace object ``tr.ns`` which accesses the result keys by attribute::
68
69 In [12]: tr.results['a']
70 Out[12]: 73
71
72 In [13]: tr.ns.a
73 Out[13]: 73
74
75 That should cover the basics of running simple Tasks. There are several more powerful things the user can do with Tasks covered later. The most useful probably being using a ``MutiEngineClient`` interface to initialize all the engines with the import dependencies necessary to run the user's Tasks.
76
77 There are many options for running and managing Tasks. The best way to learn further about the ``Task`` interface is to study the examples in ``docs/examples``. If the user do so and learn a lots about this interface, we encourage the user to expand this documentation about the ``Task`` system.
78
79 Overview of the Task System
80 ===========================
81
82 The user's view of the ``Task`` system has three basic objects: The ``TaskClient``, the ``Task``, and the ``TaskResult``. The names of these three objects well indicate their role.
83
84 The ``TaskClient`` is the user's ``Task`` farming connection to the IPython cluster. Unlike the ``MultiEngineClient``, the ``TaskControler`` handles all the scheduling and distribution of work, so the ``TaskClient`` has no notion of engines, it just submits Tasks and requests their results. The Tasks are described as ``Task`` objects, and their results are wrapped in ``TaskResult`` objects. Thus, there are very few necessary methods for the user to manage.
85
86 Inside the task system is a Scheduler object, which assigns tasks to workers. The default scheduler is a simple FIFO queue. Subclassing the Scheduler should be easy, just implementing your own priority system.
87
88 The TaskClient
89 ==============
90
91 The ``TaskClient`` is the object the user use to connect to the ``Controller`` that is managing the user's Tasks. It is the analog of the ``MultiEngineClient`` for the standard IPython multiplexing interface. As with all client interfaces, the first step is to import the IPython Client Module::
92
93 In [1]: from IPython.kernel import client
94
95 Just as with the ``MultiEngineClient``, the user create the ``TaskClient`` with a tuple, containing the ip-address and port of the ``Controller``. the ``client`` module conveniently has the default address of the ``Task`` interface of the controller. Creating a default ``TaskClient`` object would be done with this::
96
97 In [2]: tc = client.TaskClient(client.default_task_address)
98
99 or, if the user want to specify a non default location of the ``Controller``, the user can specify explicitly::
100
101 In [3]: tc = client.TaskClient(("192.168.1.1", 10113))
102
103 As discussed earlier, the ``TaskClient`` only has a few basic methods.
104
105 * ``tc.run(task)``
106 ``run`` is the method by which the user submits Tasks. It takes exactly one argument, a ``Task`` object. All the advanced control of ``Task`` behavior is handled by properties of the ``Task`` object, rather than the submission command, so they will be discussed later in the `Task`_ section. ``run`` returns an integer, the ``Task``ID by which the ``Task`` and its results can be tracked and retrieved::
107
108 In [4]: ``Task``ID = tc.run(``Task``)
109
110 * ``tc.get_task_result(taskid, block=``False``)``
111 ``get_task_result`` is the method by which results are retrieved. It takes a single integer argument, the ``Task``ID`` of the result the user wish to retrieve. ``get_task_result`` also takes a keyword argument ``block``. ``block`` specifies whether the user actually want to wait for the result. If ``block`` is false, as it is by default, ``get_task_result`` will return immediately. If the ``Task`` has completed, it will return the ``TaskResult`` object for that ``Task``. But if the ``Task`` has not completed, it will return ``None``. If the user specify ``block=``True``, then ``get_task_result`` will wait for the ``Task`` to complete, and always return the ``TaskResult`` for the requested ``Task``.
112 * ``tc.barrier(taskid(s))``
113 ``barrier`` is a synchronization method. It takes exactly one argument, a ``Task``ID or list of taskIDs. ``barrier`` will block until all the specified Tasks have completed. In practice, a barrier is often called between the ``Task`` submission section of the code and the result gathering section::
114
115 In [5]: taskIDs = [ tc.run(``Task``) for ``Task`` in myTasks ]
116
117 In [6]: tc.get_task_result(taskIDs[-1]) is None
118 Out[6]: ``True``
119
120 In [7]: tc.barrier(``Task``ID)
121
122 In [8]: results = [ tc.get_task_result(tid) for tid in taskIDs ]
123
124 * ``tc.queue_status(verbose=``False``)``
125 ``queue_status`` is a method for querying the state of the ``TaskControler``. ``queue_status`` returns a dict of the form::
126
127 {'scheduled': Tasks that have been submitted but yet run
128 'pending' : Tasks that are currently running
129 'succeeded': Tasks that have completed successfully
130 'failed' : Tasks that have finished with a failure
131 }
132
133 if @verbose is not specified (or is ``False``), then the values of the dict are integers - the number of Tasks in each state. if @verbose is ``True``, then each element in the dict is a list of the taskIDs in that state::
134
135 In [8]: tc.queue_status()
136 Out[8]: {'scheduled': 4,
137 'pending' : 2,
138 'succeeded': 5,
139 'failed' : 1
140 }
141
142 In [9]: tc.queue_status(verbose=True)
143 Out[9]: {'scheduled': [8,9,10,11],
144 'pending' : [6,7],
145 'succeeded': [0,1,2,4,5],
146 'failed' : [3]
147 }
148
149 * ``tc.abort(taskid)``
150 ``abort`` allows the user to abort Tasks that have already been submitted. ``abort`` will always return immediately. If the ``Task`` has completed, ``abort`` will raise an ``IndexError ``Task`` Already Completed``. An obvious case for ``abort`` would be where the user submits a long-running ``Task`` with a number of retries (see ``Task``_ section for how to specify retries) in an interactive session, but realizes there has been a typo. The user can then abort the ``Task``, preventing certain failures from cluttering up the queue. It can also be used for parallel search-type problems, where only one ``Task`` will give the solution, so once the user find the solution, the user would want to abort all remaining Tasks to prevent wasted work.
151 * ``tc.spin()``
152 ``spin`` simply triggers the scheduler in the ``TaskControler``. Under most normal circumstances, this will do nothing. The primary known usage case involves the ``Task`` dependency (see `Dependencies`_). The dependency is a function of an Engine's ``properties``, but changing the ``properties`` via the ``MutliEngineClient`` does not trigger a reschedule event. The main example case for this requires the following event sequence:
153 * ``engine`` is available, ``Task`` is submitted, but ``engine`` does not have ``Task``'s dependencies.
154 * ``engine`` gets necessary dependencies while no new Tasks are submitted or completed.
155 * now ``engine`` can run ``Task``, but a ``Task`` event is required for the ``TaskControler`` to try scheduling ``Task`` again.
156
157 ``spin`` is just an empty ping method to ensure that the Controller has scheduled all available Tasks, and should not be needed under most normal circumstances.
158
159 That covers the ``TaskClient``, a simple interface to the cluster. With this, the user can submit jobs (and abort if necessary), request their results, synchronize on arbitrary subsets of jobs.
160
161 .. _task: The Task Object
162
163 The Task Object
164 ===============
165
166 The ``Task`` is the basic object for describing a job. It can be used in a very simple manner, where the user just specifies a command string to be executed as the ``Task``. The usage of this first argument is exactly the same as the ``execute`` method of the ``MultiEngine`` (in fact, ``execute`` is called to run the code)::
167
168 In [1]: t = client.Task("a = str(id)")
169
170 This ``Task`` would run, and store the string representation of the ``id`` element in ``a`` in each worker's namespace, but it is fairly useless because the user does not know anything about the state of the ``worker`` on which it ran at the time of retrieving results. It is important that each ``Task`` not expect the state of the ``worker`` to persist after the ``Task`` is completed.
171 There are many different situations for using ``Task`` Farming, and the ``Task`` object has many attributes for use in customizing the ``Task`` behavior. All of a ``Task``'s attributes may be specified in the constructor, through keyword arguments, or after ``Task`` construction through attribute assignment.
172
173 Data Attributes
174 ***************
175 It is likely that the user may want to move data around before or after executing the ``Task``. We provide methods of sending data to initialize the worker's namespace, and specifying what data to bring back as the ``Task``'s results.
176
177 * pull = []
178 The obvious case is as above, where ``t`` would execute and store the result of ``myfunc`` in ``a``, it is likely that the user would want to bring ``a`` back to their namespace. This is done through the ``pull`` attribute. ``pull`` can be a string or list of strings, and it specifies the names of variables to be retrieved. The ``TaskResult`` object retrieved by ``get_task_result`` will have a dictionary of keys and values, and the ``Task``'s ``pull`` attribute determines what goes into it::
179
180 In [2]: t = client.Task("a = str(id)", pull = "a")
181
182 In [3]: t = client.Task("a = str(id)", pull = ["a", "id"])
183
184 * push = {}
185 A user might also want to initialize some data into the namespace before the code part of the ``Task`` is run. Enter ``push``. ``push`` is a dictionary of key/value pairs to be loaded from the user's namespace into the worker's immediately before execution::
186
187 In [4]: t = client.Task("a = f(submitted)", push=dict(submitted=time.time()), pull="a")
188
189 push and pull result directly in calling an ``engine``'s ``push`` and ``pull`` methods before and after ``Task`` execution respectively, and thus their api is the same.
190
191 Namespace Cleaning
192 ******************
193 When a user is running a large number of Tasks, it is likely that the namespace of the worker's could become cluttered. Some Tasks might be sensitive to clutter, while others might be known to cause namespace pollution. For these reasons, Tasks have two boolean attributes for cleaning up the namespace.
194
195 * ``clear_after``
196 if clear_after is specified ``True``, the worker on which the ``Task`` was run will be reset (via ``engine.reset``) upon completion of the ``Task``. This can be useful for both Tasks that produce clutter or Tasks whose intermediate data one might wish to be kept private::
197
198 In [5]: t = client.Task("a = range(1e10)", pull = "a",clear_after=True)
199
200
201 * ``clear_before``
202 as one might guess, clear_before is identical to ``clear_after``, but it takes place before the ``Task`` is run. This ensures that the ``Task`` runs on a fresh worker::
203
204 In [6]: t = client.Task("a = globals()", pull = "a",clear_before=True)
205
206 Of course, a user can both at the same time, ensuring that all workers are clear except when they are currently running a job. Both of these default to ``False``.
207
208 Fault Tolerance
209 ***************
210 It is possible that Tasks might fail, and there are a variety of reasons this could happen. One might be that the worker it was running on disconnected, and there was nothing wrong with the ``Task`` itself. With the fault tolerance attributes of the ``Task``, the user can specify how many times to resubmit the ``Task``, and what to do if it never succeeds.
211
212 * ``retries``
213 ``retries`` is an integer, specifying the number of times a ``Task`` is to be retried. It defaults to zero. It is often a good idea for this number to be 1 or 2, to protect the ``Task`` from disconnecting engines, but not a large number. If a ``Task`` is failing 100 times, there is probably something wrong with the ``Task``. The canonical bad example:
214
215 In [7]: t = client.Task("os.kill(os.getpid(), 9)", retries=99)
216
217 This would actually take down 100 workers.
218
219 * ``recovery_task``
220 ``recovery_task`` is another ``Task`` object, to be run in the event of the original ``Task`` still failing after running out of retries. Since ``recovery_task`` is another ``Task`` object, it can have its own ``recovery_task``. The chain of Tasks is limitless, except loops are not allowed (that would be bad!).
221
222 Dependencies
223 ************
224 Dependencies are the most powerful part of the ``Task`` farming system, because it allows the user to do some classification of the workers, and guide the ``Task`` distribution without meddling with the controller directly. It makes use of two objects - the ``Task``'s ``depend`` attribute, and the engine's ``properties``. See the `MultiEngine`_ reference for how to use engine properties. The engine properties api exists for extending IPython, allowing conditional execution and new controllers that make decisions based on properties of its engines. Currently the ``Task`` dependency is the only internal use of the properties api.
225
226 .. _MultiEngine: ./parallel_multiengine
227
228 The ``depend`` attribute of a ``Task`` must be a function of exactly one argument, the worker's properties dictionary, and it should return ``True`` if the ``Task`` should be allowed to run on the worker and ``False`` if not. The usage in the controller is fault tolerant, so exceptions raised by ``Task.depend`` will be ignored and functionally equivalent to always returning ``False``. Tasks`` with invalid ``depend`` functions will never be assigned to a worker::
229
230 In [8]: def dep(properties):
231 ... return properties["RAM"] > 2**32 # have at least 4GB
232 In [9]: t = client.Task("a = bigfunc()", depend=dep)
233
234 It is important to note that assignment of values to the properties dict is done entirely by the user, either locally (in the engine) using the EngineAPI, or remotely, through the ``MultiEngineClient``'s get/set_properties methods.
235
236
237
238
239
240
1 NO CONTENT: file renamed from IPython/config/config.py to sandbox/config.py
1 NO CONTENT: file renamed from IPython/config/tests/sample_config.py to sandbox/sample_config.py
1 NO CONTENT: file renamed from IPython/config/tests/test_config.py to sandbox/test_config.py
1 NO CONTENT: file renamed from IPython/config/traitlets.py to sandbox/traitlets.py
1 NO CONTENT: modified file chmod 100644 => 100755, file renamed from scripts/wxIpython to scripts/ipython-wx
@@ -1,85 +1,106
1 1 #!python
2 2 """Windows-specific part of the installation"""
3 3
4 4 import os, sys, shutil
5 pjoin = os.path.join
5 6
6 7 def mkshortcut(target,description,link_file,*args,**kw):
7 8 """make a shortcut if it doesn't exist, and register its creation"""
8 9
9 10 create_shortcut(target, description, link_file,*args,**kw)
10 11 file_created(link_file)
11 12
12 13 def install():
13 14 """Routine to be run by the win32 installer with the -install switch."""
14
15
15 16 from IPython.Release import version
16
17
17 18 # Get some system constants
18 19 prefix = sys.prefix
19 python = prefix + r'\python.exe'
20 # Lookup path to common startmenu ...
21 ip_dir = get_special_folder_path('CSIDL_COMMON_PROGRAMS') + r'\IPython'
20 python = pjoin(prefix, 'python.exe')
22 21
23 # Some usability warnings at installation time. I don't want them at the
24 # top-level, so they don't appear if the user is uninstalling.
25 try:
26 import ctypes
27 except ImportError:
28 print ('To take full advantage of IPython, you need ctypes from:\n'
29 'http://sourceforge.net/projects/ctypes')
30
31 try:
32 import win32con
33 except ImportError:
34 print ('To take full advantage of IPython, you need pywin32 from:\n'
35 'http://starship.python.net/crew/mhammond/win32/Downloads.html')
36
37 try:
38 import readline
39 except ImportError:
40 print ('To take full advantage of IPython, you need readline from:\n'
41 'http://sourceforge.net/projects/uncpythontools')
42
43 ipybase = '"'+prefix+r'\scripts\ipython"'
22 # Lookup path to common startmenu ...
23 ip_start_menu = pjoin(get_special_folder_path('CSIDL_COMMON_PROGRAMS'), 'IPython')
44 24 # Create IPython entry ...
45 if not os.path.isdir(ip_dir):
46 os.mkdir(ip_dir)
47 directory_created(ip_dir)
48
49 # Create program shortcuts ...
50 f = ip_dir + r'\IPython.lnk'
51 a = ipybase
52 mkshortcut(python,'IPython',f,a)
53
54 f = ip_dir + r'\pysh.lnk'
55 a = ipybase+' -p sh'
56 mkshortcut(python,'IPython command prompt mode',f,a)
57
58 f = ip_dir + r'\scipy.lnk'
59 a = ipybase+' -pylab -p scipy'
60 mkshortcut(python,'IPython scipy profile',f,a)
25 if not os.path.isdir(ip_start_menu):
26 os.mkdir(ip_start_menu)
27 directory_created(ip_start_menu)
28
29 # Create .py and .bat files to make things available from
30 # the Windows command line. Thanks to the Twisted project
31 # for this logic!
32 programs = [
33 'ipython',
34 'iptest',
35 'ipcontroller',
36 'ipengine',
37 'ipcluster',
38 'ipythonx',
39 'ipython-wx',
40 'irunner'
41 ]
42 scripts = pjoin(prefix,'scripts')
43 for program in programs:
44 raw = pjoin(scripts, program)
45 bat = raw + '.bat'
46 py = raw + '.py'
47 # Create .py versions of the scripts
48 shutil.copy(raw, py)
49 # Create .bat files for each of the scripts
50 bat_file = file(bat,'w')
51 bat_file.write("@%s %s %%*" % (python, py))
52 bat_file.close()
53
54 # Now move onto setting the Start Menu up
55 ipybase = pjoin(scripts, 'ipython')
61 56
62 # Create documentation shortcuts ...
57 link = pjoin(ip_start_menu, 'IPython.lnk')
58 cmd = '"%s"' % ipybase
59 mkshortcut(python,'IPython',link,cmd)
60
61 link = pjoin(ip_start_menu, 'pysh.lnk')
62 cmd = '"%s" -p sh' % ipybase
63 mkshortcut(python,'IPython (command prompt mode)',link,cmd)
64
65 link = pjoin(ip_start_menu, 'pylab.lnk')
66 cmd = '"%s" -pylab' % ipybase
67 mkshortcut(python,'IPython (PyLab mode)',link,cmd)
68
69 link = pjoin(ip_start_menu, 'scipy.lnk')
70 cmd = '"%s" -pylab -p scipy' % ipybase
71 mkshortcut(python,'IPython (scipy profile)',link,cmd)
72
73 link = pjoin(ip_start_menu, 'IPython test suite.lnk')
74 cmd = '"%s" -vv' % pjoin(scripts, 'iptest')
75 mkshortcut(python,'Run the IPython test suite',link,cmd)
76
77 link = pjoin(ip_start_menu, 'ipcontroller.lnk')
78 cmd = '"%s" -xy' % pjoin(scripts, 'ipcontroller')
79 mkshortcut(python,'IPython controller',link,cmd)
80
81 link = pjoin(ip_start_menu, 'ipengine.lnk')
82 cmd = '"%s"' % pjoin(scripts, 'ipengine')
83 mkshortcut(python,'IPython engine',link,cmd)
84
85 # Create documentation shortcuts ...
63 86 t = prefix + r'\share\doc\ipython\manual\ipython.pdf'
64 f = ip_dir + r'\Manual in PDF.lnk'
87 f = ip_start_menu + r'\Manual in PDF.lnk'
65 88 mkshortcut(t,r'IPython Manual - PDF-Format',f)
66
67 t = prefix + r'\share\doc\ipython\manual\ipython.html'
68 f = ip_dir + r'\Manual in HTML.lnk'
89
90 t = prefix + r'\share\doc\ipython\manual\html\index.html'
91 f = ip_start_menu + r'\Manual in HTML.lnk'
69 92 mkshortcut(t,'IPython Manual - HTML-Format',f)
70
71 # make ipython.py
72 shutil.copy(prefix + r'\scripts\ipython', prefix + r'\scripts\ipython.py')
93
73 94
74 95 def remove():
75 96 """Routine to be run by the win32 installer with the -remove switch."""
76 97 pass
77 98
78 99 # main()
79 100 if len(sys.argv) > 1:
80 101 if sys.argv[1] == '-install':
81 102 install()
82 103 elif sys.argv[1] == '-remove':
83 104 remove()
84 105 else:
85 106 print "Script was called with option %s" % sys.argv[1]
@@ -1,179 +1,189
1 1 #!/usr/bin/env python
2 2 # -*- coding: utf-8 -*-
3 3 """Setup script for IPython.
4 4
5 5 Under Posix environments it works like a typical setup.py script.
6 6 Under Windows, the command sdist is not supported, since IPython
7 7 requires utilities which are not available under Windows."""
8 8
9 9 #-------------------------------------------------------------------------------
10 10 # Copyright (C) 2008 The IPython Development Team
11 11 #
12 12 # Distributed under the terms of the BSD License. The full license is in
13 13 # the file COPYING, distributed as part of this software.
14 14 #-------------------------------------------------------------------------------
15 15
16 16 #-------------------------------------------------------------------------------
17 17 # Imports
18 18 #-------------------------------------------------------------------------------
19 19
20 20 # Stdlib imports
21 21 import os
22 22 import sys
23 23
24 24 from glob import glob
25 25
26 26 # BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
27 27 # update it when the contents of directories change.
28 28 if os.path.exists('MANIFEST'): os.remove('MANIFEST')
29 29
30 30 from distutils.core import setup
31 31
32 32 # Local imports
33 33 from IPython.genutils import target_update
34 34
35 35 from setupbase import (
36 36 setup_args,
37 37 find_packages,
38 38 find_package_data,
39 39 find_scripts,
40 40 find_data_files,
41 41 check_for_dependencies
42 42 )
43 43
44 44 isfile = os.path.isfile
45 45
46 46 #-------------------------------------------------------------------------------
47 47 # Handle OS specific things
48 48 #-------------------------------------------------------------------------------
49 49
50 50 if os.name == 'posix':
51 51 os_name = 'posix'
52 52 elif os.name in ['nt','dos']:
53 53 os_name = 'windows'
54 54 else:
55 55 print 'Unsupported operating system:',os.name
56 56 sys.exit(1)
57 57
58 58 # Under Windows, 'sdist' has not been supported. Now that the docs build with
59 59 # Sphinx it might work, but let's not turn it on until someone confirms that it
60 60 # actually works.
61 61 if os_name == 'windows' and 'sdist' in sys.argv:
62 62 print 'The sdist command is not available under Windows. Exiting.'
63 63 sys.exit(1)
64 64
65 65 #-------------------------------------------------------------------------------
66 66 # Things related to the IPython documentation
67 67 #-------------------------------------------------------------------------------
68 68
69 69 # update the manuals when building a source dist
70 70 if len(sys.argv) >= 2 and sys.argv[1] in ('sdist','bdist_rpm'):
71 71 import textwrap
72 72
73 73 # List of things to be updated. Each entry is a triplet of args for
74 74 # target_update()
75 75 to_update = [
76 76 # FIXME - Disabled for now: we need to redo an automatic way
77 77 # of generating the magic info inside the rst.
78 78 #('docs/magic.tex',
79 79 #['IPython/Magic.py'],
80 80 #"cd doc && ./update_magic.sh" ),
81 81
82 82 ('docs/man/ipython.1.gz',
83 83 ['docs/man/ipython.1'],
84 84 "cd docs/man && gzip -9c ipython.1 > ipython.1.gz"),
85 85
86 86 ('docs/man/pycolor.1.gz',
87 87 ['docs/man/pycolor.1'],
88 88 "cd docs/man && gzip -9c pycolor.1 > pycolor.1.gz"),
89 89 ]
90 90
91 # Only build the docs is sphinx is present
91 # Only build the docs if sphinx is present
92 92 try:
93 93 import sphinx
94 94 except ImportError:
95 95 pass
96 96 else:
97 pass
98 # BEG: This is disabled as I am not sure what to depend on.
99 # I actually don't think we should be automatically building
100 # the docs for people.
101 # The do_sphinx scripts builds html and pdf, so just one
102 # target is enough to cover all manual generation
103 # to_update.append(
104 # ('docs/manual/ipython.pdf',
105 # ['IPython/Release.py','docs/source/ipython.rst'],
106 # "cd docs && python do_sphinx.py")
107 # )
97 # The Makefile calls the do_sphinx scripts to build html and pdf, so
98 # just one target is enough to cover all manual generation
99
100 # First, compute all the dependencies that can force us to rebuild the
101 # docs. Start with the main release file that contains metadata
102 docdeps = ['IPython/Release.py']
103 # Include all the reST sources
104 pjoin = os.path.join
105 for dirpath,dirnames,filenames in os.walk('docs/source'):
106 if dirpath in ['_static','_templates']:
107 continue
108 docdeps += [ pjoin(dirpath,f) for f in filenames
109 if f.endswith('.txt') ]
110 # and the examples
111 for dirpath,dirnames,filenames in os.walk('docs/example'):
112 docdeps += [ pjoin(dirpath,f) for f in filenames
113 if not f.endswith('~') ]
114 # then, make them all dependencies for the main PDF (the html will get
115 # auto-generated as well).
116 to_update.append(
117 ('docs/dist/ipython.pdf',
118 docdeps,
119 "cd docs && make dist")
120 )
108 121
109 122 [ target_update(*t) for t in to_update ]
110 123
111 # Build the docs
112 os.system('cd docs && make dist')
113 124
114 125 #---------------------------------------------------------------------------
115 126 # Find all the packages, package data, scripts and data_files
116 127 #---------------------------------------------------------------------------
117 128
118 129 packages = find_packages()
119 130 package_data = find_package_data()
120 131 scripts = find_scripts()
121 132 data_files = find_data_files()
122 133
123 134 #---------------------------------------------------------------------------
124 135 # Handle dependencies and setuptools specific things
125 136 #---------------------------------------------------------------------------
126 137
127 138 # This dict is used for passing extra arguments that are setuptools
128 139 # specific to setup
129 140 setuptools_extra_args = {}
130 141
131 142 if 'setuptools' in sys.modules:
132 143 setuptools_extra_args['zip_safe'] = False
133 144 setuptools_extra_args['entry_points'] = {
134 145 'console_scripts': [
135 146 'ipython = IPython.ipapi:launch_new_instance',
136 147 'pycolor = IPython.PyColorize:main',
137 148 'ipcontroller = IPython.kernel.scripts.ipcontroller:main',
138 149 'ipengine = IPython.kernel.scripts.ipengine:main',
139 150 'ipcluster = IPython.kernel.scripts.ipcluster:main',
140 'ipythonx = IPython.frontend.wx.ipythonx:main'
151 'ipythonx = IPython.frontend.wx.ipythonx:main',
152 'iptest = IPython.testing.iptest:main',
141 153 ]
142 154 }
143 setup_args["extras_require"] = dict(
155 setup_args['extras_require'] = dict(
144 156 kernel = [
145 "zope.interface>=3.4.1",
146 "Twisted>=8.0.1",
147 "foolscap>=0.2.6"
157 'zope.interface>=3.4.1',
158 'Twisted>=8.0.1',
159 'foolscap>=0.2.6'
148 160 ],
149 doc=['Sphinx>=0.3','pygments'],
161 doc='Sphinx>=0.3',
150 162 test='nose>=0.10.1',
151 security=["pyOpenSSL>=0.6"]
163 security='pyOpenSSL>=0.6'
152 164 )
153 165 # Allow setuptools to handle the scripts
154 166 scripts = []
155 # eggs will lack docs, examples
156 data_files = []
157 167 else:
158 168 # package_data of setuptools was introduced to distutils in 2.4
159 169 cfgfiles = filter(isfile, glob('IPython/UserConfig/*'))
160 170 if sys.version_info < (2,4):
161 171 data_files.append(('lib', 'IPython/UserConfig', cfgfiles))
162 172 # If we are running without setuptools, call this function which will
163 173 # check for dependencies and inform the user what is needed. This is
164 174 # just to make life easy for users.
165 175 check_for_dependencies()
166 176
167 177
168 178 #---------------------------------------------------------------------------
169 179 # Do the actual setup now
170 180 #---------------------------------------------------------------------------
171 181
172 182 setup_args['packages'] = packages
173 183 setup_args['package_data'] = package_data
174 184 setup_args['scripts'] = scripts
175 185 setup_args['data_files'] = data_files
176 186 setup_args.update(setuptools_extra_args)
177 187
178 188 if __name__ == '__main__':
179 189 setup(**setup_args)
@@ -1,275 +1,279
1 1 # encoding: utf-8
2 2
3 3 """
4 4 This module defines the things that are used in setup.py for building IPython
5 5
6 6 This includes:
7 7
8 8 * The basic arguments to setup
9 9 * Functions for finding things like packages, package data, etc.
10 10 * A function for checking dependencies.
11 11 """
12 12
13 13 __docformat__ = "restructuredtext en"
14 14
15 15 #-------------------------------------------------------------------------------
16 16 # Copyright (C) 2008 The IPython Development Team
17 17 #
18 18 # Distributed under the terms of the BSD License. The full license is in
19 19 # the file COPYING, distributed as part of this software.
20 20 #-------------------------------------------------------------------------------
21 21
22 22 #-------------------------------------------------------------------------------
23 23 # Imports
24 24 #-------------------------------------------------------------------------------
25 25
26 26 import os, sys
27 27
28 28 from glob import glob
29 29
30 30 from setupext import install_data_ext
31 31
32 32 #-------------------------------------------------------------------------------
33 33 # Useful globals and utility functions
34 34 #-------------------------------------------------------------------------------
35 35
36 36 # A few handy globals
37 37 isfile = os.path.isfile
38 38 pjoin = os.path.join
39 39
40 40 def oscmd(s):
41 41 print ">", s
42 42 os.system(s)
43 43
44 44 # A little utility we'll need below, since glob() does NOT allow you to do
45 45 # exclusion on multiple endings!
46 46 def file_doesnt_endwith(test,endings):
47 47 """Return true if test is a file and its name does NOT end with any
48 48 of the strings listed in endings."""
49 49 if not isfile(test):
50 50 return False
51 51 for e in endings:
52 52 if test.endswith(e):
53 53 return False
54 54 return True
55 55
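# Hypothetical usage (editorial illustration, the names below are made up):
# keep ordinary files while excluding several unwanted suffixes in one pass:
#
#     docfiles = [f for f in glob('docs/examples/*')
#                 if file_doesnt_endwith(f, ['~', '.pyc', '.orig'])]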
56 56 #---------------------------------------------------------------------------
57 57 # Basic project information
58 58 #---------------------------------------------------------------------------
59 59
60 60 # Release.py contains version, authors, license, url, keywords, etc.
61 61 execfile(pjoin('IPython','Release.py'))
62 62
63 63 # Create a dict with the basic information
64 64 # This dict is eventually passed to setup after additional keys are added.
65 65 setup_args = dict(
66 66 name = name,
67 67 version = version,
68 68 description = description,
69 69 long_description = long_description,
70 70 author = author,
71 71 author_email = author_email,
72 72 url = url,
73 73 download_url = download_url,
74 74 license = license,
75 75 platforms = platforms,
76 76 keywords = keywords,
77 77 cmdclass = {'install_data': install_data_ext},
78 78 )
79 79
80 80
81 81 #---------------------------------------------------------------------------
82 82 # Find packages
83 83 #---------------------------------------------------------------------------
84 84
85 85 def add_package(packages,pname,config=False,tests=False,scripts=False,
86 86 others=None):
87 87 """
88 88 Add a package to the list of packages, including certain subpackages.
89 89 """
90 90 packages.append('.'.join(['IPython',pname]))
91 91 if config:
92 92 packages.append('.'.join(['IPython',pname,'config']))
93 93 if tests:
94 94 packages.append('.'.join(['IPython',pname,'tests']))
95 95 if scripts:
96 96 packages.append('.'.join(['IPython',pname,'scripts']))
97 97 if others is not None:
98 98 for o in others:
99 99 packages.append('.'.join(['IPython',pname,o]))
100 100
101 101 def find_packages():
102 102 """
103 103 Find all of IPython's packages.
104 104 """
105 105 packages = ['IPython']
106 106 add_package(packages, 'config', tests=True)
107 107 add_package(packages, 'Extensions')
108 108 add_package(packages, 'external')
109 109 add_package(packages, 'gui')
110 110 add_package(packages, 'gui.wx')
111 111 add_package(packages, 'frontend', tests=True)
112 112 add_package(packages, 'frontend._process')
113 113 add_package(packages, 'frontend.wx')
114 114 add_package(packages, 'frontend.cocoa', tests=True)
115 115 add_package(packages, 'kernel', config=True, tests=True, scripts=True)
116 116 add_package(packages, 'kernel.core', config=True, tests=True)
117 117 add_package(packages, 'testing', tests=True)
118 add_package(packages, 'tests')
119 add_package(packages, 'testing.plugin', tests=False)
118 120 add_package(packages, 'tools', tests=True)
119 121 add_package(packages, 'UserConfig')
120 122 return packages
121 123
122 124 #---------------------------------------------------------------------------
123 125 # Find package data
124 126 #---------------------------------------------------------------------------
125 127
126 128 def find_package_data():
127 129 """
128 130 Find IPython's package_data.
129 131 """
130 132 # This is not enough for these things to appear in an sdist.
131 133 # We need to muck with the MANIFEST to get this to work
132 134 package_data = {
133 135 'IPython.UserConfig' : ['*'],
134 136 'IPython.tools.tests' : ['*.txt'],
135 137 'IPython.testing' : ['*.txt']
136 138 }
137 139 return package_data
138 140
139 141
140 142 #---------------------------------------------------------------------------
141 143 # Find data files
142 144 #---------------------------------------------------------------------------
143 145
144 146 def make_dir_struct(tag,base,out_base):
145 147 """Make the directory structure of all files below a starting dir.
146 148
147 149 This is just a convenience routine to help build a nested directory
148 150 hierarchy because distutils is too stupid to do this by itself.
149 151
150 152 Returns a list of (tag, out_path, files) triples, with out_path anchored at out_base, for use as data_files entries with install_data.
151 153 """
152 154
153 155 # we'll use these a lot below
154 156 lbase = len(base)
155 157 pathsep = os.path.sep
156 158 lpathsep = len(pathsep)
157 159
158 160 out = []
159 161 for (dirpath,dirnames,filenames) in os.walk(base):
160 162 # we need to strip out the dirpath from the base to map it to the
161 163 # output (installation) path. This requires possibly stripping the
162 164 # path separator, because otherwise pjoin will not work correctly
163 165 # (pjoin('foo/','/bar') returns '/bar').
164 166
165 167 dp_eff = dirpath[lbase:]
166 168 if dp_eff.startswith(pathsep):
167 169 dp_eff = dp_eff[lpathsep:]
168 170 # The output path must be anchored at the out_base marker
169 171 out_path = pjoin(out_base,dp_eff)
170 172 # Now we can generate the final filenames. Since os.walk only produces
171 173 # filenames, we must join back with the dirpath to get full valid file
172 174 # paths:
173 175 pfiles = [pjoin(dirpath,f) for f in filenames]
174 176 # Finally, generate the entry we need, which is a triple of (tag,output
175 177 # path, files) for use as a data_files parameter in install_data.
176 178 out.append((tag,out_path,pfiles))
177 179
178 180 return out
179 181
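# Editorial illustration of make_dir_struct(), matching the call made in
# find_data_files() below; the 'kernel' subdirectory is only a hypothetical
# example:
#
#     make_dir_struct('data', 'docs/examples', 'share/doc/ipython/examples')
#
# returns one (tag, out_path, files) triple per directory found under
# docs/examples, e.g.
#
#     ('data', 'share/doc/ipython/examples/kernel', [files in docs/examples/kernel])
#
# ready to be used as data_files entries by install_data.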
180 182
181 183 def find_data_files():
182 184 """
183 185 Find IPython's data_files.
184 186
185 187 Most of these are docs.
186 188 """
187 189
188 190 docdirbase = 'share/doc/ipython'
189 191 manpagebase = 'share/man/man1'
190 192
191 193 # Simple file lists can be made by hand
192 194 manpages = filter(isfile, glob('docs/man/*.1.gz'))
193 195 igridhelpfiles = filter(isfile, glob('IPython/Extensions/igrid_help.*'))
194 196
195 197 # For nested structures, use the utility above
196 198 example_files = make_dir_struct('data','docs/examples',
197 199 pjoin(docdirbase,'examples'))
198 200 manual_files = make_dir_struct('data','docs/dist',pjoin(docdirbase,'manual'))
199 201
200 202 # And assemble the entire output list
201 203 data_files = [ ('data',manpagebase, manpages),
202 204 ('data',pjoin(docdirbase,'extensions'),igridhelpfiles),
203 205 ] + manual_files + example_files
204 206
205 207 ## import pprint # dbg
206 208 ## print '*'*80
207 209 ## print 'data files'
208 210 ## pprint.pprint(data_files)
209 211 ## print '*'*80
210 212
211 213 return data_files
212 214
213 215 #---------------------------------------------------------------------------
214 216 # Find scripts
215 217 #---------------------------------------------------------------------------
216 218
217 219 def find_scripts():
218 220 """
219 221 Find IPython's scripts.
220 222 """
221 223 scripts = ['IPython/kernel/scripts/ipengine',
222 224 'IPython/kernel/scripts/ipcontroller',
223 'IPython/kernel/scripts/ipcluster',
225 'IPython/kernel/scripts/ipcluster',
224 226 'scripts/ipython',
225 227 'scripts/ipythonx',
228 'scripts/ipython-wx',
226 229 'scripts/pycolor',
227 230 'scripts/irunner',
231 'scripts/iptest',
228 232 ]
229 233
230 234 # Script to be run by the windows binary installer after the default setup
231 235 # routine, to add shortcuts and similar windows-only things. Windows
232 236 # post-install scripts MUST reside in the scripts/ dir, otherwise distutils
233 237 # doesn't find them.
234 238 if 'bdist_wininst' in sys.argv:
235 239 if len(sys.argv) > 2 and ('sdist' in sys.argv or 'bdist_rpm' in sys.argv):
236 240 print >> sys.stderr,"ERROR: bdist_wininst must be run alone. Exiting."
237 241 sys.exit(1)
238 242 scripts.append('scripts/ipython_win_post_install.py')
239 243
240 244 return scripts
241 245
242 246 #---------------------------------------------------------------------------
243 247 # Verify all dependencies
244 248 #---------------------------------------------------------------------------
245 249
246 250 def check_for_dependencies():
247 251 """Check for IPython's dependencies.
248 252
249 253 This function should NOT be called if running under setuptools!
250 254 """
251 255 from setupext.setupext import (
252 256 print_line, print_raw, print_status, print_message,
253 257 check_for_zopeinterface, check_for_twisted,
254 258 check_for_foolscap, check_for_pyopenssl,
255 259 check_for_sphinx, check_for_pygments,
256 260 check_for_nose, check_for_pexpect
257 261 )
258 262 print_line()
259 263 print_raw("BUILDING IPYTHON")
260 264 print_status('python', sys.version)
261 265 print_status('platform', sys.platform)
262 266 if sys.platform == 'win32':
263 267 print_status('Windows version', sys.getwindowsversion())
264 268
265 269 print_raw("")
266 270 print_raw("OPTIONAL DEPENDENCIES")
267 271
268 272 check_for_zopeinterface()
269 273 check_for_twisted()
270 274 check_for_foolscap()
271 275 check_for_pyopenssl()
272 276 check_for_sphinx()
273 277 check_for_pygments()
274 278 check_for_nose()
275 279 check_for_pexpect()
@@ -1,20 +1,14
1 1 #!/usr/bin/env python
2 2 """Wrapper to run setup.py using setuptools."""
3 3
4 4 import os
5 4 import sys
6 5
7 # Add my local path to sys.path
8 home = os.environ['HOME']
9 sys.path.insert(0,'%s/usr/local/lib/python%s/site-packages' %
10 (home,sys.version[:3]))
11
12 6 # now, import setuptools and call the actual setup
13 7 import setuptools
14 8 # print sys.argv
15 9 #sys.argv=['','bdist_egg']
16 10 execfile('setup.py')
17 11
18 12 # clean up the junk left around by setuptools
19 13 if "develop" not in sys.argv:
20 14 os.system('rm -rf ipython.egg-info build')
@@ -1,15 +1,15
1 1 from IPython.external.path import path
2 2 fs = path('..').walkfiles('*.py')
3 3
4 4 for f in fs:
5 5 errs = ''
6 6 cont = f.bytes()
7 7 if '\t' in cont:
8 8 errs+='t'
9 9
10 10 if '\r' in cont:
11 11 errs+='r'
12 12
13 13 if errs:
14 14 print "%3s" % errs, f
15 No newline at end of file
15
@@ -1,101 +1,58
1 1 #!/bin/sh
2 2 # IPython release script
3 3
4 4 PYVER=`python -V 2>&1 | awk '{print $2}' | awk -F '.' '{print $1$2}' `
5 5 version=`ipython -Version`
6 6 ipdir=~/ipython/ipython
7 7 ipbackupdir=~/ipython/backup
8 8
9 9 echo
10 10 echo "Releasing IPython version $version"
11 11 echo "=================================="
12 12
13 echo "Marking ChangeLog with release information and making NEWS file..."
14
15 # Stamp changelog and save a copy of the status at each version, in case later
16 # we want the NEWS file to start from a point before the very last release (if
17 # very small interim releases have no significant changes).
18
19 cd $ipdir/doc
20 cp ChangeLog ChangeLog.old
21 cp ChangeLog ChangeLog.$version
22 daystamp=`date +%Y-%m-%d`
23 echo $daystamp " ***" Released version $version > ChangeLog
24 echo >> ChangeLog
25 cat ChangeLog.old >> ChangeLog
26 rm ChangeLog.old
27
28 # Build NEWS file
29 echo "Changes between the last two releases (major or minor)" > NEWS
30 echo "Note that this is an auto-generated diff of the ChangeLogs" >> NEWS
31 echo >> NEWS
32 diff ChangeLog.previous ChangeLog | grep -v '^0a' | sed 's/^> //g' >> NEWS
33 cp ChangeLog ChangeLog.previous
34
35 # Clean up build/dist directories
36 rm -rf $ipdir/build/*
37 rm -rf $ipdir/dist/*
38
39 13 # Perform local backup
40 14 cd $ipdir/tools
41 15 ./make_tarball.py
42 16 mv ipython-*.tgz $ipbackupdir
43 17
18 # Clean up build/dist directories
19 rm -rf $ipdir/build/*
20 rm -rf $ipdir/dist/*
21
44 22 # Build source and binary distros
45 23 cd $ipdir
46 24 ./setup.py sdist --formats=gztar
47 25
48 26 # Build version-specific RPMs, where we must use the --python option to ensure
49 27 # that the resulting RPM is really built with the requested python version (so
50 28 # things go to lib/python2.X/...)
51 python2.4 ./setup.py bdist_rpm --binary-only --release=py24 --python=/usr/bin/python2.4
52 python2.5 ./setup.py bdist_rpm --binary-only --release=py25 --python=/usr/bin/python2.5
29 #python2.4 ./setup.py bdist_rpm --binary-only --release=py24 --python=/usr/bin/python2.4
30 #python2.5 ./setup.py bdist_rpm --binary-only --release=py25 --python=/usr/bin/python2.5
53 31
54 32 # Build eggs
55 33 python2.4 ./setup_bdist_egg.py
56 34 python2.5 ./setup_bdist_egg.py
57 35
58 36 # Call the windows build separately, so that the extra Windows scripts don't
59 37 # get pulled into Unix builds (setup.py has code which checks for
60 38 # bdist_wininst)
61 39 ./setup.py bdist_wininst --install-script=ipython_win_post_install.py
62 40
63 41 # Change name so retarded Vista runs the installer correctly
64 42 rename 's/win32/win32-setup/' $ipdir/dist/*.exe
65 43
66 44 # Register with the Python Package Index (PyPI)
67 45 echo "Registering with PyPI..."
68 46 cd $ipdir
69 47 ./setup.py register
70 48
71 49 # Upload all files
72 50 cd $ipdir/dist
73 51 echo "Uploading distribution files..."
74 52 scp * ipython@ipython.scipy.org:www/dist/
75 53
76 54 echo "Uploading backup files..."
77 55 cd $ipbackupdir
78 56 scp `ls -1tr *tgz | tail -1` ipython@ipython.scipy.org:www/backup/
79 57
80 echo "Updating webpage..."
81 cd $ipdir/doc
82 www=~/ipython/homepage
83 cp ChangeLog NEWS $www
84 rm -rf $www/doc/*
85 cp -r manual/ $www/doc
86 cd $www
87 ./update
88
89 # Alert package maintainers
90 #echo "Alerting package maintainers..."
91 #maintainers='fernando.perez@berkeley.edu ariciputi@users.sourceforge.net jack@xiph.org tretkowski@inittab.de dryice@hotpop.com willmaier@ml1.net'
92 # maintainers='fernando.perez@berkeley.edu'
93
94 # for email in $maintainers
95 # do
96 # echo "Emailing $email..."
97 # mail -s "[Package maintainer notice] A new IPython is out. Version: $version" \
98 # $email < NEWS
99 # done
100
101 58 echo "Done!"
@@ -1,37 +1,31
1 1 #!/bin/sh
2 2
3 3 # release test
4 4
5 ipdir=~/ipython/ipython
6 ipbackupdir=~/ipython/backup
5 ipdir=$PWD/..
7 6
8 7 cd $ipdir
9 8
10 9 # Clean up build/dist directories
11 10 rm -rf $ipdir/build/*
12 11 rm -rf $ipdir/dist/*
13 12
14 # Perform local backup
15 cd $ipdir/tools
16 ./make_tarball.py
17 mv ipython-*.tgz $ipbackupdir
18
19 13 # build source distros
20 14 cd $ipdir
21 15 ./setup.py sdist --formats=gztar
22 16
23 17 # Build rpms
24 #python2.4 ./setup.py bdist_rpm --binary-only --release=py24 --python=/usr/bin/python2.4
25 #python2.5 ./setup.py bdist_rpm --binary-only --release=py25 --python=/usr/bin/python2.5
18 python2.4 ./setup.py bdist_rpm --binary-only --release=py24 --python=/usr/bin/python2.4
19 python2.5 ./setup.py bdist_rpm --binary-only --release=py25 --python=/usr/bin/python2.5
26 20
27 21 # Build eggs
28 22 python2.4 ./setup_bdist_egg.py
29 23 python2.5 ./setup_bdist_egg.py
30 24
31 25 # Call the windows build separately, so that the extra Windows scripts don't
32 26 # get pulled into Unix builds (setup.py has code which checks for
33 27 # bdist_wininst)
34 28 ./setup.py bdist_wininst --install-script=ipython_win_post_install.py
35 29
36 30 # Change name so retarded Vista runs the installer correctly
37 31 rename 's/win32/win32-setup/' $ipdir/dist/*.exe
@@ -1,6 +1,6
1 1 #!/bin/sh
2 2
3 # clean public testing/ dir and upload
4 #ssh "rm -f ipython@ipython.scipy.org:www/dist/testing/*"
5 cd ~/ipython/ipython/dist
3 ipdir=$PWD/..
4
5 cd $ipdir/dist
6 6 scp * ipython@ipython.scipy.org:www/dist/testing/
@@ -1,660 +0,0
1 """ ILeo - Leo plugin for IPython
2
3
4 """
5 import IPython.ipapi
6 import IPython.genutils
7 import IPython.generics
8 from IPython.hooks import CommandChainDispatcher
9 import re
10 import UserDict
11 from IPython.ipapi import TryNext
12 import IPython.macro
13 import IPython.Shell
14
15 _leo_push_history = set()
16
17 def init_ipython(ipy):
18 """ This will be run by _ip.load('ipy_leo')
19
20 Leo still needs to run update_commander() after this.
21
22 """
23 global ip
24 ip = ipy
25 IPython.Shell.hijack_tk()
26 ip.set_hook('complete_command', mb_completer, str_key = '%mb')
27 ip.expose_magic('mb',mb_f)
28 ip.expose_magic('lee',lee_f)
29 ip.expose_magic('leoref',leoref_f)
30 ip.expose_magic('lleo',lleo_f)
31 # Note that no other push command should EVER have lower than 0
32 expose_ileo_push(push_mark_req, -1)
33 expose_ileo_push(push_cl_node,100)
34 # this should be the LAST one that will be executed, and it will never raise TryNext
35 expose_ileo_push(push_ipython_script, 1000)
36 expose_ileo_push(push_plain_python, 100)
37 expose_ileo_push(push_ev_node, 100)
38 ip.set_hook('pre_prompt_hook', ileo_pre_prompt_hook)
39 global wb
40 wb = LeoWorkbook()
41 ip.user_ns['wb'] = wb
42
43
44 first_launch = True
45
46 def update_commander(new_leox):
47 """ Set the Leo commander to use
48
49 This will be run every time Leo does ipython-launch; basically,
50 when the user switches the document he is focusing on, he should do
51 ipython-launch to tell ILeo what document the commands apply to.
52
53 """
54
55 global first_launch
56 if first_launch:
57 show_welcome()
58 first_launch = False
59
60 global c,g
61 c,g = new_leox.c, new_leox.g
62 print "Set Leo Commander:",c.frame.getTitle()
63
64 # will probably be overwritten by user, but handy for experimentation early on
65 ip.user_ns['c'] = c
66 ip.user_ns['g'] = g
67 ip.user_ns['_leo'] = new_leox
68
69 new_leox.push = push_position_from_leo
70 run_leo_startup_node()
71
72 from IPython.external.simplegeneric import generic
73 import pprint
74
75 def es(s):
76 g.es(s, tabName = 'IPython')
77 pass
78
79 @generic
80 def format_for_leo(obj):
81 """ Convert obj to string representiation (for editing in Leo)"""
82 return pprint.pformat(obj)
83
84 # Just an example - note that this is a bad thing to actually do!
85 #@format_for_leo.when_type(list)
86 #def format_list(obj):
87 # return "\n".join(str(s) for s in obj)
88
89
90 attribute_re = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*$')
91 def valid_attribute(s):
92 return attribute_re.match(s)
93
94 _rootnode = None
95 def rootnode():
96 """ Get ileo root node (@ipy-root)
97
98 if node has become invalid or has not been set, return None
99
100 Note that the root is the *first* @ipy-root item found
101 """
102 global _rootnode
103 if _rootnode is None:
104 return None
105 if c.positionExists(_rootnode.p):
106 return _rootnode
107 _rootnode = None
108 return None
109
110 def all_cells():
111 global _rootnode
112 d = {}
113 r = rootnode()
114 if r is not None:
115 nodes = r.p.children_iter()
116 else:
117 nodes = c.allNodes_iter()
118
119 for p in nodes:
120 h = p.headString()
121 if h.strip() == '@ipy-root':
122 # update root node (found it for the first time)
123 _rootnode = LeoNode(p)
124 # the next recursive call will use the children of new root
125 return all_cells()
126
127 if h.startswith('@a '):
128 d[h.lstrip('@a ').strip()] = p.parent().copy()
129 elif not valid_attribute(h):
130 continue
131 d[h] = p.copy()
132 return d
133
134 def eval_node(n):
135 body = n.b
136 if not body.startswith('@cl'):
137 # plain python repr node, just eval it
138 return ip.ev(n.b)
139 # @cl nodes deserve special treatment - first eval the first line (minus cl), then use it to call the rest of body
140 first, rest = body.split('\n',1)
141 tup = first.split(None, 1)
142 # @cl alone SPECIAL USE-> dump var to user_ns
143 if len(tup) == 1:
144 val = ip.ev(rest)
145 ip.user_ns[n.h] = val
146 es("%s = %s" % (n.h, repr(val)[:20] ))
147 return val
148
149 cl, hd = tup
150
151 xformer = ip.ev(hd.strip())
152 es('Transform w/ %s' % repr(xformer))
153 return xformer(rest, n)
154
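# Editorial illustration (hypothetical node contents, not part of the original
# file): a node with headline 'foo' and body
#
#     @cl
#     1 + 2
#
# evaluates to 3, which is stored in the IPython namespace as 'foo' and
# returned; with '@cl some_callable' on the first line, the remainder of the
# body is instead passed to some_callable(rest_of_body, node) and its result
# is used.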
155 class LeoNode(object, UserDict.DictMixin):
156 """ Node in Leo outline
157
158 Most important attributes (getters/setters available):
159 .v - evaluate node, can also be assigned to
160 .b, .h - body string, headline string
161 .l - value as string list
162
163 Also supports iteration,
164
165 setitem / getitem (indexing):
166 wb.foo['key'] = 12
167 assert wb.foo['key'].v == 12
168
169 Note the asymmetry on setitem and getitem! Also other
170 dict methods are available.
171
172 .ipush() - run push-to-ipython
173
174 Minibuffer command access (tab completion works):
175
176 mb save-to-file
177
178 """
179 def __init__(self,p):
180 self.p = p.copy()
181
182 def __str__(self):
183 return "<LeoNode %s>" % str(self.p)
184
185 __repr__ = __str__
186
187 def __get_h(self): return self.p.headString()
188 def __set_h(self,val):
189 c.setHeadString(self.p,val)
190 LeoNode.last_edited = self
191 c.redraw()
192
193 h = property( __get_h, __set_h, doc = "Node headline string")
194
195 def __get_b(self): return self.p.bodyString()
196 def __set_b(self,val):
197 c.setBodyString(self.p, val)
198 LeoNode.last_edited = self
199 c.redraw()
200
201 b = property(__get_b, __set_b, doc = "Node body string")
202
203 def __set_val(self, val):
204 self.b = format_for_leo(val)
205
206 v = property(lambda self: eval_node(self), __set_val, doc = "Node evaluated value")
207
208 def __set_l(self,val):
209 self.b = '\n'.join(val )
210 l = property(lambda self : IPython.genutils.SList(self.b.splitlines()),
211 __set_l, doc = "Node value as string list")
212
213 def __iter__(self):
214 """ Iterate through nodes direct children """
215
216 return (LeoNode(p) for p in self.p.children_iter())
217
218 def __children(self):
219 d = {}
220 for child in self:
221 head = child.h
222 tup = head.split(None,1)
223 if len(tup) > 1 and tup[0] == '@k':
224 d[tup[1]] = child
225 continue
226
227 if not valid_attribute(head):
228 d[head] = child
229 continue
230 return d
231 def keys(self):
232 d = self.__children()
233 return d.keys()
234 def __getitem__(self, key):
235 """ wb.foo['Some stuff'] Return a child node with headline 'Some stuff'
236
237 If key is a valid python name (e.g. 'foo'), look for headline '@k foo' as well
238 """
239 key = str(key)
240 d = self.__children()
241 return d[key]
242 def __setitem__(self, key, val):
243 """ You can do wb.foo['My Stuff'] = 12 to create children
244
245 This will create 'My Stuff' as a child of foo (if it does not exist), and
246 do .v = 12 assignment.
247
248 Exception:
249
250 wb.foo['bar'] = 12
251
252 will create a child with headline '@k bar', because bar is a valid python name
253 and we don't want to crowd the WorkBook namespace with (possibly numerous) entries
254 """
255 key = str(key)
256 d = self.__children()
257 if key in d:
258 d[key].v = val
259 return
260
261 if not valid_attribute(key):
262 head = key
263 else:
264 head = '@k ' + key
265 p = c.createLastChildNode(self.p, head, '')
266 LeoNode(p).v = val
267
268 def __delitem__(self, key):
269 """ Remove child
270
271 Allows stuff like wb.foo.clear() to remove all children
272 """
273 self[key].p.doDelete()
274 c.redraw()
275
276 def ipush(self):
277 """ Does push-to-ipython on the node """
278 push_from_leo(self)
279
280 def go(self):
281 """ Set node as current node (to quickly see it in Outline) """
282 c.setCurrentPosition(self.p)
283 c.redraw()
284
285 def append(self):
286 """ Add new node as the last child, return the new node """
287 p = self.p.insertAsLastChild()
288 return LeoNode(p)
289
290
291 def script(self):
292 """ Method to get the 'tangled' contents of the node
293
294 (parse @others, << section >> references etc.)
295 """
296 return g.getScript(c,self.p,useSelectedText=False,useSentinels=False)
297
298 def __get_uA(self):
299 p = self.p
300 # Create the uA if necessary.
301 if not hasattr(p.v.t,'unknownAttributes'):
302 p.v.t.unknownAttributes = {}
303
304 d = p.v.t.unknownAttributes.setdefault('ipython', {})
305 return d
306
307 uA = property(__get_uA, doc = "Access persistent unknownAttributes of node")
308
309
310 class LeoWorkbook:
311 """ class for 'advanced' node access
312
313 Has attributes for all "discoverable" nodes. Node is discoverable if it
314 either
315
316 - has a valid python name (Foo, bar_12)
317 - is a parent of an anchor node (if it has a child '@a foo', it is visible as foo)
318
319 """
320 def __getattr__(self, key):
321 if key.startswith('_') or key == 'trait_names' or not valid_attribute(key):
322 raise AttributeError
323 cells = all_cells()
324 p = cells.get(key, None)
325 if p is None:
326 return add_var(key)
327
328 return LeoNode(p)
329
330 def __str__(self):
331 return "<LeoWorkbook>"
332 def __setattr__(self,key, val):
333 raise AttributeError("Direct assignment to workbook denied, try wb.%s.v = %s" % (key,val))
334
335 __repr__ = __str__
336
337 def __iter__(self):
338 """ Iterate all (even non-exposed) nodes """
339 cells = all_cells()
340 return (LeoNode(p) for p in c.allNodes_iter())
341
342 current = property(lambda self: LeoNode(c.currentPosition()), doc = "Currently selected node")
343
344 def match_h(self, regex):
345 cmp = re.compile(regex)
346 for node in self:
347 if re.match(cmp, node.h, re.IGNORECASE):
348 yield node
349 return
350
351 def require(self, req):
352 """ Used to control node push dependencies
353
354 Call this as first statement in nodes. If node has not been pushed, it will be pushed before proceeding
355
356 E.g. wb.require('foo') will do wb.foo.ipush() if it hasn't been done already
357 """
358
359 if req not in _leo_push_history:
360 es('Require: ' + req)
361 getattr(self,req).ipush()
362
363
364 @IPython.generics.complete_object.when_type(LeoWorkbook)
365 def workbook_complete(obj, prev):
366 return all_cells().keys() + [s for s in prev if not s.startswith('_')]
367
368
369 def add_var(varname):
370 r = rootnode()
371 try:
372 if r is None:
373 p2 = g.findNodeAnywhere(c,varname)
374 else:
375 p2 = g.findNodeInChildren(c, r.p, varname)
376 if p2:
377 return LeoNode(p2)
378
379 if r is not None:
380 p2 = r.p.insertAsLastChild()
381
382 else:
383 p2 = c.currentPosition().insertAfter()
384
385 c.setHeadString(p2,varname)
386 return LeoNode(p2)
387 finally:
388 c.redraw()
389
390 def add_file(self,fname):
391 p2 = c.currentPosition().insertAfter()
392
393 push_from_leo = CommandChainDispatcher()
394
395 def expose_ileo_push(f, prio = 0):
396 push_from_leo.add(f, prio)
397
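# Editorial note: CommandChainDispatcher comes from IPython.hooks and is not
# shown in this diff. It is assumed to call its registered handlers in
# priority order, falling through to the next handler whenever one raises
# TryNext -- roughly:
#
#     class CommandChainDispatcher:
#         def __init__(self):
#             self.chain = []                  # list of (priority, callable)
#         def add(self, func, priority=0):
#             self.chain.append((priority, func))
#             self.chain.sort()
#         def __call__(self, *args, **kw):
#             for prio, cmd in self.chain:
#                 try:
#                     return cmd(*args, **kw)
#                 except TryNext:
#                     continue
#
# which is why push_mark_req (priority -1) always runs first and
# push_ipython_script (priority 1000) acts as the final catch-all below.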
398 def push_ipython_script(node):
399 """ Execute the node body in IPython, as if it was entered in interactive prompt """
400 try:
401 ohist = ip.IP.output_hist
402 hstart = len(ip.IP.input_hist)
403 script = node.script()
404
405 # The current node _p needs to handle wb.require() and recursive ipushes
406 old_p = ip.user_ns.get('_p',None)
407 ip.user_ns['_p'] = node
408 ip.runlines(script)
409 ip.user_ns['_p'] = old_p
410 if old_p is None:
411 del ip.user_ns['_p']
412
413 has_output = False
414 for idx in range(hstart,len(ip.IP.input_hist)):
415 val = ohist.get(idx,None)
416 if val is None:
417 continue
418 has_output = True
419 inp = ip.IP.input_hist[idx]
420 if inp.strip():
421 es('In: %s' % (inp[:40], ))
422
423 es('<%d> %s' % (idx, pprint.pformat(ohist[idx],width = 40)))
424
425 if not has_output:
426 es('ipy run: %s (%d LL)' %( node.h,len(script)))
427 finally:
428 c.redraw()
429
430
431 def eval_body(body):
432 try:
433 val = ip.ev(body)
434 except:
435 # just use stringlist if it's not completely legal python expression
436 val = IPython.genutils.SList(body.splitlines())
437 return val
438
439 def push_plain_python(node):
440 if not node.h.endswith('P'):
441 raise TryNext
442 script = node.script()
443 lines = script.count('\n')
444 try:
445 exec script in ip.user_ns
446 except:
447 print " -- Exception in script:\n"+script + "\n --"
448 raise
449 es('ipy plain: %s (%d LL)' % (node.h,lines))
450
451
452 def push_cl_node(node):
453 """ If node starts with @cl, eval it
454
455 The result is put as last child of @ipy-results node, if it exists
456 """
457 if not node.b.startswith('@cl'):
458 raise TryNext
459
460 p2 = g.findNodeAnywhere(c,'@ipy-results')
461 val = node.v
462 if p2:
463 es("=> @ipy-results")
464 LeoNode(p2).v = val
465 es(val)
466
467 def push_ev_node(node):
468 """ If headline starts with @ev, eval it and put result in body """
469 if not node.h.startswith('@ev '):
470 raise TryNext
471 expr = node.h.lstrip('@ev ')
472 es('ipy eval ' + expr)
473 res = ip.ev(expr)
474 node.v = res
475
476 def push_mark_req(node):
477 """ This should be the first one that gets called.
478
479 It will mark the node as 'pushed', for wb.require.
480 """
481 _leo_push_history.add(node.h)
482 raise TryNext
483
484
485 def push_position_from_leo(p):
486 try:
487 push_from_leo(LeoNode(p))
488 except AttributeError,e:
489 if e.args == ("Commands instance has no attribute 'frame'",):
490 es("Error: ILeo not associated with .leo document")
491 es("Press alt+shift+I to fix!")
492 else:
493 raise
494
495 @generic
496 def edit_object_in_leo(obj, varname):
497 """ Make it @cl node so it can be pushed back directly by alt+I """
498 node = add_var(varname)
499 formatted = format_for_leo(obj)
500 if not formatted.startswith('@cl'):
501 formatted = '@cl\n' + formatted
502 node.b = formatted
503 node.go()
504
505 @edit_object_in_leo.when_type(IPython.macro.Macro)
506 def edit_macro(obj,varname):
507 bod = '_ip.defmacro("""\\\n' + obj.value + '""")'
508 node = add_var('Macro_' + varname)
509 node.b = bod
510 node.go()
511
512 def get_history(hstart = 0):
513 res = []
514 ohist = ip.IP.output_hist
515
516 for idx in range(hstart, len(ip.IP.input_hist)):
517 val = ohist.get(idx,None)
518 has_output = True
519 inp = ip.IP.input_hist_raw[idx]
520 if inp.strip():
521 res.append('In [%d]: %s' % (idx, inp))
522 if val:
523 res.append(pprint.pformat(val))
524 res.append('\n')
525 return ''.join(res)
526
527
528 def lee_f(self,s):
529 """ Open file(s)/objects in Leo
530
531 - %lee hist -> open full session history in leo
532 - Takes an object. l = [1,2,"hello"]; %lee l. Alt+I in leo pushes the object back
533 - Takes an mglob pattern, e.g. '%lee *.cpp' or %lee 'rec:*.cpp'
534 - Takes input history indices: %lee 4 6-8 10 12-47
535 """
536 import os
537
538 try:
539 if s == 'hist':
540 wb.ipython_history.b = get_history()
541 wb.ipython_history.go()
542 return
543
544
545 if s and s[0].isdigit():
546 # numbers; push input slices to leo
547 lines = self.extract_input_slices(s.strip().split(), True)
548 v = add_var('stored_ipython_input')
549 v.b = '\n'.join(lines)
550 return
551
552
553 # try editing the object directly
554 obj = ip.user_ns.get(s, None)
555 if obj is not None:
556 edit_object_in_leo(obj,s)
557 return
558
559
560 # if it's not object, it's a file name / mglob pattern
561 from IPython.external import mglob
562
563 files = (os.path.abspath(f) for f in mglob.expand(s))
564 for fname in files:
565 p = g.findNodeAnywhere(c,'@auto ' + fname)
566 if not p:
567 p = c.currentPosition().insertAfter()
568
569 p.setHeadString('@auto ' + fname)
570 if os.path.isfile(fname):
571 c.setBodyString(p,open(fname).read())
572 c.selectPosition(p)
573 print "Editing file(s), press ctrl+shift+w in Leo to write @auto nodes"
574 finally:
575 c.redraw()
576
577 def leoref_f(self,s):
578 """ Quick reference for ILeo """
579 import textwrap
580 print textwrap.dedent("""\
581 %lee file/object - open file / object in leo
582 %lleo Launch leo (use if you started ipython first!)
583 wb.foo.v - eval node foo (i.e. headstring is 'foo' or '@ipy foo')
584 wb.foo.v = 12 - assign to body of node foo
585 wb.foo.b - read or write the body of node foo
586 wb.foo.l - body of node foo as string list
587
588 for el in wb.foo:
589 print el.v
590
591 """
592 )
593
594
595
596 def mb_f(self, arg):
597 """ Execute leo minibuffer commands
598
599 Example:
600 mb save-to-file
601 """
602 c.executeMinibufferCommand(arg)
603
604 def mb_completer(self,event):
605 """ Custom completer for minibuffer """
606 cmd_param = event.line.split()
607 if event.line.endswith(' '):
608 cmd_param.append('')
609 if len(cmd_param) > 2:
610 return ip.IP.Completer.file_matches(event.symbol)
611 cmds = c.commandsDict.keys()
612 cmds.sort()
613 return cmds
614
615 def ileo_pre_prompt_hook(self):
616 # this will fail if leo is not running yet
617 try:
618 c.outerUpdate()
619 except NameError:
620 pass
621 raise TryNext
622
623
624
625 def show_welcome():
626 print "------------------"
627 print "Welcome to Leo-enabled IPython session!"
628 print "Try %leoref for quick reference."
629 import IPython.platutils
630 IPython.platutils.set_term_title('ILeo')
631 IPython.platutils.freeze_term_title()
632
633 def run_leo_startup_node():
634 p = g.findNodeAnywhere(c,'@ipy-startup')
635 if p:
636 print "Running @ipy-startup nodes"
637 for n in LeoNode(p):
638 push_from_leo(n)
639
640 def lleo_f(self, args):
641 """ Launch leo from within IPython
642
643 This command will return immediately when Leo has been
644 launched, leaving a Leo session that is connected
645 with current IPython session (once you press alt+I in leo)
646
647 Usage::
648 lleo foo.leo
649 lleo
650 """
651
652 import shlex, sys
653 argv = ['leo'] + shlex.split(args)
654 sys.argv = argv
655 # if this var exists and is true, leo will "launch" (connect)
656 # ipython immediately when it's started
657 global _request_immediate_connect
658 _request_immediate_connect = True
659 import leo.core.runLeo
660 leo.core.runLeo.run()
@@ -1,357 +0,0
1 # encoding: utf-8
2 # -*- test-case-name: IPython.test.test_shell -*-
3
4 """The core IPython Shell"""
5
6 __docformat__ = "restructuredtext en"
7
8 #-------------------------------------------------------------------------------
9 # Copyright (C) 2008 The IPython Development Team
10 #
11 # Distributed under the terms of the BSD License. The full license is in
12 # the file COPYING, distributed as part of this software.
13 #-------------------------------------------------------------------------------
14
15 #-------------------------------------------------------------------------------
16 # Imports
17 #-------------------------------------------------------------------------------
18
19 import pprint
20 import signal
21 import sys
22 import threading
23 import time
24
25 from code import InteractiveConsole, softspace
26 from StringIO import StringIO
27
28 from IPython.OutputTrap import OutputTrap
29 from IPython import ultraTB
30
31 from IPython.kernel.error import NotDefined
32
33
34 class InteractiveShell(InteractiveConsole):
35 """The Basic IPython Shell class.
36
37 This class provides the basic capabilities of IPython. Currently
38 this class does not do anything IPython specific. That is, it is
39 just a python shell.
40
41 It is modelled on code.InteractiveConsole, but adds additional
42 capabilities. These additional capabilities are what give IPython
43 its power.
44
45 The current version of this class is meant to be a prototype that guides
46 the future design of the IPython core. This class must not use Twisted
47 in any way, but it must be designed in a way that makes it easy to
48 incorporate into Twisted and hook network protocols up to.
49
50 Some of the methods of this class comprise the official IPython core
51 interface. These methods must be thread safe and they must return types
52 that can be easily serialized by protocols such as PB, XML-RPC and SOAP.
53 Locks have been provided for making the methods thread safe, but additional
54 locks can be added as needed.
55
56 Any method that is meant to be a part of the official interface must also
57 be declared in the kernel.coreservice.ICoreService interface. Eventually
58 all other methods should have single leading underscores to note that they
59 are not designed to be 'public.' Currently, because this class inherits
60 from code.InteractiveConsole there are many private methods w/o leading
61 underscores. The interface should be as simple as possible and methods
62 should not be added to the interface unless they really need to be there.
63
64 Note:
65
66 - For now I am using methods named put/get to move objects in/out of the
67 users namespace. Originally, I was calling these methods push/pull, but
68 because code.InteractiveConsole already has a push method, I had to use
69 something different. Eventually, we probably won't subclass this class
70 so we can call these methods whatever we want. So, what do we want to
71 call them?
72 - We need a way of running the trapping of stdout/stderr in different ways.
73 We should be able to i) trap, ii) not trap at all or iii) trap and echo
74 things to stdout and stderr.
75 - How should errors be handled? Should exceptions be raised?
76 - What should methods that don't compute anything return? The default of
77 None?
78 """
79
80 def __init__(self, locals=None, filename="<console>"):
81 """Creates a new TrappingInteractiveConsole object."""
82 InteractiveConsole.__init__(self,locals,filename)
83 self._trap = OutputTrap(debug=0)
84 self._stdin = []
85 self._stdout = []
86 self._stderr = []
87 self._last_type = self._last_traceback = self._last_value = None
88 #self._namespace_lock = threading.Lock()
89 #self._command_lock = threading.Lock()
90 self.lastCommandIndex = -1
91 # I am using this user defined signal to interrupt the currently
92 # running command. I am not sure if this is the best way, but
93 # it is working!
94 # This doesn't work on Windows as it doesn't have this signal.
95 #signal.signal(signal.SIGUSR1, self._handleSIGUSR1)
96
97 # An exception handler. Experimental: later we need to make the
98 # modes/colors available to user configuration, etc.
99 self.tbHandler = ultraTB.FormattedTB(color_scheme='NoColor',
100 mode='Context',
101 tb_offset=2)
102
103 def _handleSIGUSR1(self, signum, frame):
104 """Handle the SIGUSR1 signal by printing to stderr."""
105 print>>sys.stderr, "Command stopped."
106
107 def _prefilter(self, line, more):
108 return line
109
110 def _trapRunlines(self, lines):
111 """
112 This executes the python source code in `lines` in the
113 self.locals namespace and traps stdout and stderr. Upon
114 exiting, the captured stdout and stderr of the last executed
115 command are appended to self._stdout and self._stderr.
116 """
117
118 # Execute the code
119 #self._namespace_lock.acquire()
120 self._trap.flush()
121 self._trap.trap()
122 self._runlines(lines)
123 self.lastCommandIndex += 1
124 self._trap.release()
125 #self._namespace_lock.release()
126
127 # Save stdin, stdout and stderr to lists
128 #self._command_lock.acquire()
129 self._stdin.append(lines)
130 self._stdout.append(self.prune_output(self._trap.out.getvalue()))
131 self._stderr.append(self.prune_output(self._trap.err.getvalue()))
132 #self._command_lock.release()
133
134 def prune_output(self, s):
135 """Only return the first and last 1600 chars of stdout and stderr.
136
137 Something like this is required to make sure that the engine and
138 controller don't become overwhelmed by the size of stdout/stderr.
139 """
140 if len(s) > 3200:
141 return s[:1600] + '\n............\n' + s[-1600:]
142 else:
143 return s
144
145 # Lifted from iplib.InteractiveShell
146 def _runlines(self,lines):
147 """Run a string of one or more lines of source.
148
149 This method is capable of running a string containing multiple source
150 lines, as if they had been entered at the IPython prompt. Since it
151 exposes IPython's processing machinery, the given strings can contain
152 magic calls (%magic), special shell access (!cmd), etc."""
153
154 # We must start with a clean buffer, in case this is run from an
155 # interactive IPython session (via a magic, for example).
156 self.resetbuffer()
157 lines = lines.split('\n')
158 more = 0
159 for line in lines:
160 # skip blank lines so we don't mess up the prompt counter, but do
161 # NOT skip even a blank line if we are in a code block (more is
162 # true)
163 if line or more:
164 more = self.push((self._prefilter(line,more)))
165 # IPython's runsource returns None if there was an error
166 # compiling the code. This allows us to stop processing right
167 # away, so the user gets the error message at the right place.
168 if more is None:
169 break
170 # final newline in case the input didn't have it, so that the code
171 # actually does get executed
172 if more:
173 self.push('\n')
174
175 def runcode(self, code):
176 """Execute a code object.
177
178 When an exception occurs, self.showtraceback() is called to
179 display a traceback. All exceptions are caught except
180 SystemExit, which is reraised.
181
182 A note about KeyboardInterrupt: this exception may occur
183 elsewhere in this code, and may not always be caught. The
184 caller should be prepared to deal with it.
185
186 """
187
188 self._last_type = self._last_traceback = self._last_value = None
189 try:
190 exec code in self.locals
191 except:
192 # Since the exception info may need to travel across the wire, we
193 # pack it in right away. Note that we are abusing the exception
194 # value to store a fully formatted traceback, since the stack can
195 # not be serialized for network transmission.
196 et,ev,tb = sys.exc_info()
197 self._last_type = et
198 self._last_traceback = tb
199 tbinfo = self.tbHandler.text(et,ev,tb)
200 # Construct a meaningful traceback message for shipping over the
201 # wire.
202 buf = pprint.pformat(self.buffer)
203 try:
204 ename = et.__name__
205 except:
206 ename = et
207 msg = """\
208 %(ev)s
209 ***************************************************************************
210 An exception occurred in an IPython engine while executing user code.
211
212 Current execution buffer (lines being run):
213 %(buf)s
214
215 A full traceback from the actual engine:
216 %(tbinfo)s
217 ***************************************************************************
218 """ % locals()
219 self._last_value = msg
220 else:
221 if softspace(sys.stdout, 0):
222 print
223
224 ##################################################################
225 # Methods that are a part of the official interface
226 #
227 # These methods should also be put in the
228 # kernel.coreservice.ICoreService interface.
229 #
230 # These methods must conform to certain restrictions that allow
231 # them to be exposed to various network protocols:
232 #
233 # - As much as possible, these methods must return types that can be
234 # serialized by PB, XML-RPC and SOAP. None is OK.
235 # - Every method must be thread safe. There are some locks provided
236 # for this purpose, but new, specialized locks can be added to the
237 # class.
238 ##################################################################
239
240 # Methods for running code
241
242 def exc_info(self):
243 """Return exception information much like sys.exc_info().
244
245 This method returns the same (etype,evalue,tb) tuple as sys.exc_info,
246 but from the last time that the engine had an exception fire."""
247
248 return self._last_type,self._last_value,self._last_traceback
249
250 def execute(self, lines):
251 self._trapRunlines(lines)
252 if self._last_type is None:
253 return self.getCommand()
254 else:
255 raise self._last_type(self._last_value)
256
257 # Methods for working with the namespace
258
259 def put(self, key, value):
260 """Put value into locals namespace with name key.
261
262 I have often called this push(), but in this case the
263 InteractiveConsole class already defines a push() method that
264 is different.
265 """
266
267 if not isinstance(key, str):
268 raise TypeError, "Objects must be keyed by strings."
269 self.update({key:value})
270
271 def get(self, key):
272 """Gets an item out of the self.locals dict by key.
273
274 Raise NameError if the object doesn't exist.
275
276 I have often called this pull(). I still like that better.
277 """
278
279 class NotDefined(object):
280 """A class to signify an objects that is not in the users ns."""
281 pass
282
283 if not isinstance(key, str):
284 raise TypeError, "Objects must be keyed by strings."
285 result = self.locals.get(key, NotDefined())
286 if isinstance(result, NotDefined):
287 raise NameError('name %s is not defined' % key)
288 else:
289 return result
290
291
292 def update(self, dictOfData):
293 """Loads a dict of key value pairs into the self.locals namespace."""
294 if not isinstance(dictOfData, dict):
295 raise TypeError, "update() takes a dict object."
296 #self._namespace_lock.acquire()
297 self.locals.update(dictOfData)
298 #self._namespace_lock.release()
299
300 # Methods for getting stdout/stderr/stdin
301
302 def reset(self):
303 """Reset the InteractiveShell."""
304
305 #self._command_lock.acquire()
306 self._stdin = []
307 self._stdout = []
308 self._stderr = []
309 self.lastCommandIndex = -1
310 #self._command_lock.release()
311
312 #self._namespace_lock.acquire()
313 # preserve id, mpi objects
314 mpi = self.locals.get('mpi', None)
315 id = self.locals.get('id', None)
316 del self.locals
317 self.locals = {'mpi': mpi, 'id': id}
318 #self._namespace_lock.release()
319
320 def getCommand(self,i=None):
321 """Get the stdin/stdout/stderr of command i."""
322
323 #self._command_lock.acquire()
324
325
326 if i is not None and not isinstance(i, int):
327 raise TypeError("Command index not an int: " + str(i))
328
329 if i in range(self.lastCommandIndex + 1):
330 inResult = self._stdin[i]
331 outResult = self._stdout[i]
332 errResult = self._stderr[i]
333 cmdNum = i
334 elif i is None and self.lastCommandIndex >= 0:
335 inResult = self._stdin[self.lastCommandIndex]
336 outResult = self._stdout[self.lastCommandIndex]
337 errResult = self._stderr[self.lastCommandIndex]
338 cmdNum = self.lastCommandIndex
339 else:
340 inResult = None
341 outResult = None
342 errResult = None
343
344 #self._command_lock.release()
345
346 if inResult is not None:
347 return dict(commandIndex=cmdNum, stdin=inResult, stdout=outResult, stderr=errResult)
348 else:
349 raise IndexError("Command with index %s does not exist" % str(i))
350
351 def getLastCommandIndex(self):
352 """Get the index of the last command."""
353 #self._command_lock.acquire()
354 ind = self.lastCommandIndex
355 #self._command_lock.release()
356 return ind
357
@@ -1,67 +0,0
1 # encoding: utf-8
2
3 """This file contains unittests for the shell.py module."""
4
5 __docformat__ = "restructuredtext en"
6
7 #-------------------------------------------------------------------------------
8 # Copyright (C) 2008 The IPython Development Team
9 #
10 # Distributed under the terms of the BSD License. The full license is in
11 # the file COPYING, distributed as part of this software.
12 #-------------------------------------------------------------------------------
13
14 #-------------------------------------------------------------------------------
15 # Imports
16 #-------------------------------------------------------------------------------
17
18 import unittest
19 from IPython.kernel.core import shell
20
21 resultKeys = ('commandIndex', 'stdin', 'stdout', 'stderr')
22
23 class BasicShellTest(unittest.TestCase):
24
25 def setUp(self):
26 self.s = shell.InteractiveShell()
27
28 def testExecute(self):
29 commands = [(0,"a = 5","",""),
30 (1,"b = 10","",""),
31 (2,"c = a + b","",""),
32 (3,"print c","15\n",""),
33 (4,"import math","",""),
34 (5,"2.0*math.pi","6.2831853071795862\n","")]
35 for c in commands:
36 result = self.s.execute(c[1])
37 self.assertEquals(result, dict(zip(resultKeys,c)))
38
39 def testPutGet(self):
40 objs = [10,"hi there",1.2342354,{"p":(1,2)}]
41 for o in objs:
42 self.s.put("key",o)
43 value = self.s.get("key")
44 self.assertEquals(value,o)
45 self.assertRaises(TypeError, self.s.put,10)
46 self.assertRaises(TypeError, self.s.get,10)
47 self.s.reset()
48 self.assertRaises(NameError, self.s.get, 'a')
49
50 def testUpdate(self):
51 d = {"a": 10, "b": 34.3434, "c": "hi there"}
52 self.s.update(d)
53 for k in d.keys():
54 value = self.s.get(k)
55 self.assertEquals(value, d[k])
56 self.assertRaises(TypeError, self.s.update, [1,2,2])
57
58 def testCommand(self):
59 self.assertRaises(IndexError,self.s.getCommand)
60 self.s.execute("a = 5")
61 self.assertEquals(self.s.getCommand(), dict(zip(resultKeys, (0,"a = 5","",""))))
62 self.assertEquals(self.s.getCommand(0), dict(zip(resultKeys, (0,"a = 5","",""))))
63 self.s.reset()
64 self.assertEquals(self.s.getLastCommandIndex(),-1)
65 self.assertRaises(IndexError,self.s.getCommand)
66
67 No newline at end of file
@@ -1,50 +0,0
1 Notes for Windows Users
2 =======================
3
4 See http://ipython.scipy.org/moin/IpythonOnWindows for up-to-date information
5 about running IPython on Windows.
6
7
8 Requirements
9 ------------
10
11 IPython runs under (as far as the Windows family is concerned):
12
13 - Windows XP, 2000 (and probably WinNT): works well. It needs:
14
15 * PyWin32: http://sourceforge.net/projects/pywin32/
16
17 * PyReadline: http://ipython.scipy.org/moin/PyReadline/Intro
18
19 * If you are using Python2.4, this in turn requires Thomas Heller's ctypes
20 from: http://starship.python.net/crew/theller/ctypes (not needed for Python
21 2.5 users, since 2.5 already ships with ctypes).
22
23 - Windows 95/98/ME: I have no idea. It should work, but I can't test.
24
25 - CygWin environments should work, they are basically Posix.
26
27 It needs Python 2.3 or newer.
28
29
30 Installation
31 ------------
32
33 Double-click the supplied .exe installer file. If all goes well, that's all
34 you need to do. You should now have an IPython entry in your Start Menu.
35
36
37 Installation from source distribution
38 -------------------------------------
39
40 In case the automatic installer does not work for some reason, you can
41 download the ipython-XXX.tar.gz file, which contains the full IPython source
42 distribution (the popular WinZip can read .tar.gz files).
43
44 After uncompressing the archive, you can install it at a command terminal just
45 like any other Python module, by using 'python setup.py install'. After this
46 completes, you can run the supplied win32_manual_post_install.py script which
47 will add the relevant shortcuts to your startup menu.
48
49 Optionally, you may skip installation altogether and just launch "ipython.py"
50 from the root folder of the extracted source distribution.
@@ -1,20 +0,0
1 # -*- coding: utf-8 -*-
2
3 import IPython.ipapi
4 ip = IPython.ipapi.get()
5
6 def ${name}_f(self, arg):
7 r""" Short explanation
8
9 Long explanation, examples
10
11 """
12
13 # opts,args = self.parse_options(arg,'rx')
14 # if 'r' in opts: pass
15
16
17
18 ip.expose_magic("${name}",${name}_f)
19
20
@@ -1,32 +0,0
1 #!/usr/bin/env python
2 # encoding: utf-8
3
4 # This example shows how the AsyncTaskClient can be used
5 # This example is currently broken
6
7 from twisted.internet import reactor, defer
8 from IPython.kernel import asyncclient
9
10 mec = asyncclient.AsyncMultiEngineClient(('localhost', 10105))
11 tc = asyncclient.AsyncTaskClient(('localhost',10113))
12
13 cmd1 = """\
14 a = 5
15 b = 10*d
16 c = a*b*d
17 """
18
19 t1 = asyncclient.Task(cmd1, clear_before=False, clear_after=True, pull=['a','b','c'])
20
21 d = mec.push(dict(d=30))
22
23 def raise_and_print(tr):
24 tr.raiseException()
25 print "a, b: ", tr.ns.a, tr.ns.b
26 return tr
27
28 d.addCallback(lambda _: tc.run(t1))
29 d.addCallback(lambda tid: tc.get_task_result(tid,block=True))
30 d.addCallback(raise_and_print)
31 d.addCallback(lambda _: reactor.stop())
32 reactor.run()
@@ -1,138 +0,0
1 =========================================
2 Advanced installation options for IPython
3 =========================================
4
5 .. contents::
6
7 Introduction
8 ============
9
10 IPython enables parallel applications to be developed in Python. This document
11 describes the steps required to install IPython. For an overview of IPython's
12 architecture as it relates to parallel computing, see our :ref:`introduction to
13 parallel computing with IPython <ip1par>`.
14
15 Please let us know if you have problems installing IPython or any of its
16 dependencies. We have tested IPython extensively with Python 2.4 and 2.5.
17
18 .. warning::
19
20 IPython will not work with Python 2.3 or below.
21
22 IPython has the following required dependencies:
23
24 1. `IPython`__
25 2. `Zope Interface`__
26 3. `Twisted`__
27 4. `Foolscap`__
28
29 .. __: http://ipython.scipy.org
30 .. __: http://pypi.python.org/pypi/zope.interface
31 .. __: http://twistedmatrix.com
32 .. __: http://foolscap.lothar.com/trac
33
34 It also has the following optional dependencies:
35
36 1. pexpect (used for certain tests)
37 2. nose (used to run our test suite)
38 3. sqlalchemy (used for database support)
39 4. mpi4py (for MPI support)
40 5. Sphinx and pygments (for building documentation)
41 6. pyOpenSSL (for security)
42
43 Getting IPython
44 ================
45
46 IPython development has been moved to `Launchpad`_. The development branch of IPython can be checked out using `Bazaar`_::
47
48 $ bzr branch lp:///~ipython/ipython/ipython1-dev
49
50 .. _Launchpad: http://www.launchpad.net/ipython
51 .. _Bazaar: http://bazaar-vcs.org/
52
53 Installation using setuptools
54 =============================
55
56 The easiest way of installing IPython and its dependencies is using
57 `setuptools`_. If you have setuptools installed you can simply use the ``easy_install``
58 script that comes with setuptools (this should be on your path if you have setuptools)::
59
60 $ easy_install ipython1
61
62 This will download and install the latest version of IPython as well as all of its dependencies. For this to work, you will need to be connected to the internet when you run this command. This will install everything into the ``site-packages`` directory of your Python distribution. If this is the system-wide Python, you will likely need admin privileges. For information about installing Python packages to other locations (that don't require admin privileges), see the `setuptools`_ documentation.
63
64 .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
65
66 If you don't want `setuptools`_ to automatically install the dependencies, you can also get the dependencies yourself, using ``easy_install``::
67
68 $ easy_install IPython
69 $ easy_install zope.interface
70 $ easy_install Twisted
71 $ easy_install foolscap
72
73 or by simply downloading and installing the dependencies manually.
74
75 If you want to have secure (highly recommended) network connections, you will also
76 need to get `pyOpenSSL`__, version 0.6, or just do::
77
78 $ easy_install ipython1[security]
79
80 .. hint:: If you want to do development on IPython and want to always
81 run off your development branch, you can run
82 :command:`python setupegg.py develop` in the IPython source tree.
83
84 .. __: http://pyopenssl.sourceforge.net/
85
86 Installation using plain distutils
87 ==================================
88
89 If you don't have `setuptools`_ installed or don't want to use it, you can also install IPython and its dependencies using ``distutils``. In this approach, you will need to get the most recent stable releases of IPython's dependencies and install each of them by doing::
90
91 $ python setup.py install
92
93 The dependencies need to be installed before installing IPython. After installing the dependencies, install IPython by running::
94
95 $ cd ipython1-dev
96 $ python setup.py install
97
98 .. note:: Here we are using setup.py rather than setupegg.py.
99
100 .. _install_testing:
101
102 Testing
103 =======
104
105 Once you have completed the installation of the IPython kernel, you can run our test suite
106 with the command::
107
108 trial ipython1
109
110 Or if you have `nose`__ installed::
111
112 nosetests -v ipython1
113
114 The ``trial`` command is part of Twisted and allows asynchronous, network-based
115 applications to be tested using Python's unittest framework. Please let us know
116 if the tests do not pass. The best way to get in touch with us is on the `IPython
117 developer mailing list`_.
118
119 .. __: http://somethingaboutorange.com/mrl/projects/nose/
120 .. _IPython developer mailing list: http://projects.scipy.org/mailman/listinfo/ipython-dev
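
For readers unfamiliar with ``trial``, the sketch below shows the general shape of such a test: a test method may return a Deferred, and ``trial`` waits for it to fire before reporting success or failure. The test itself is invented for illustration and is not part of the IPython suite::

    from twisted.internet import defer
    from twisted.trial import unittest

    class DeferredExample(unittest.TestCase):
        """Illustrative only; not an actual IPython test case."""

        def test_deferred_result(self):
            d = defer.succeed(42)                # a Deferred that has already fired
            d.addCallback(self.assertEqual, 42)  # the assertion runs as a callback
            return d                             # trial waits on the returned Deferred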
121
122 MPI Support
123 ===========
124
125 IPython includes optional support for the Message Passing Interface (`MPI`_),
126 which enables the IPython Engines to pass data between each other using `MPI`_. To use MPI with IPython, the minimal requirements are:
127
128 * An MPI implementation (we recommend `Open MPI`_)
129 * A way to call MPI (we recommend `mpi4py`_)
130
131 That said, IPython should work with any MPI implementation and with any code
132 (Python/C/C++/Fortran) that uses MPI. Please contact us for more information about
133 this.
134
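As a rough illustration of the second requirement, the kind of code an engine might execute through `mpi4py`_ looks like the following minimal sketch (plain mpi4py, nothing IPython-specific)::

    from mpi4py import MPI

    comm = MPI.COMM_WORLD      # communicator spanning all MPI processes
    rank = comm.Get_rank()     # this process's id within the communicator
    size = comm.Get_size()     # total number of processes started under mpirun

    # each rank contributes its rank number; the sum is returned on every rank
    total = comm.allreduce(rank)
    print "rank %d of %d: allreduce sum = %d" % (rank, size, total)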
135 .. _MPI: http://www-unix.mcs.anl.gov/mpi/
136 .. _mpi4py: http://mpi4py.scipy.org/
137 .. _Open MPI: http://www.open-mpi.org/
138
@@ -1,272 +0,0
1 =============================
2 Basic installation of IPython
3 =============================
4
5 Installation
6 ============
7
8 Instant instructions
9 --------------------
10
11 If you are of the impatient kind, under Linux/Unix simply untar/unzip
12 the download, then install with 'python setup.py install'. Under
13 Windows, double-click on the provided .exe binary installer.
14
15 Then, take a look at the Customization_ section for configuring things
16 optimally and `Quick tips`_ for quick tips on efficient use of
17 IPython. You can later refer to the rest of the manual for all the
18 gory details.
19
20 See the notes in the upgrading_ section if you are upgrading from a previous IPython version.
21
22
23 Detailed Unix instructions (Linux, Mac OS X, etc.)
24 --------------------------------------------------
25 For RPM-based systems, simply install the supplied package in the usual
26 manner. If you download the tar archive, the process is:
27
28 1. Unzip/untar the ipython-XXX.tar.gz file wherever you want (XXX is
29 the version number). It will make a directory called ipython-XXX.
30 Change into that directory where you will find the files README
31 and setup.py. Once you've completed the installation, you can
32 safely remove this directory.
33 2. If you are installing over a previous installation of version
34 0.2.0 or earlier, first remove your $HOME/.ipython directory,
35 since the configuration file format has changed somewhat (the '='
36 were removed from all option specifications). Or you can call
37 ipython with the -upgrade option and it will do this automatically
38 for you.
39 3. IPython uses distutils, so you can install it by simply typing at
40 the system prompt (don't type the $)::
41
42 $ python setup.py install
43
44 Note that this assumes you have root access to your machine. If
45 you don't have root access or don't want IPython to go in the
46 default python directories, you'll need to use the ``--home`` option
47 (or ``--prefix``). For example::
48
49 $ python setup.py install --home $HOME/local
50
51 will install IPython into $HOME/local and its subdirectories
52 (creating them if necessary).
53 You can type::
54
55 $ python setup.py --help
56
57 for more details.
58
59 Note that if you change the default location for ``--home`` at
60 installation, IPython may end up installed at a location which is
61 not part of your $PYTHONPATH environment variable. In this case,
62 you'll need to configure this variable to include the actual
63 directory where the IPython/ directory ended up (typically the value
64 you give to ``--home`` plus /lib/python).
65
66
67 Mac OSX information
68 -------------------
69
70 Under OSX, there is a choice you need to make. Apple ships its own build
71 of Python, which lives in the core OSX filesystem hierarchy. You can
72 also manually install a separate Python, either purely by hand
73 (typically in /usr/local) or by using Fink, which puts everything under
74 /sw. Which route to follow is a matter of personal preference, as I've
75 seen users who favor each of the approaches. Here I will simply list the
76 known installation issues under OSX, along with their solutions.
77
78 This page: http://geosci.uchicago.edu/~tobis/pylab.html contains
79 information on this topic, with additional details on how to make
80 IPython and matplotlib play nicely under OSX.
81
82 To run IPython and readline on OSX "Leopard" system python, see the
83 wiki page at http://ipython.scipy.org/moin/InstallationOSXLeopard
84
85
86 GUI problems
87 ------------
88
89 The following instructions apply to an install of IPython under OSX from
90 unpacking the .tar.gz distribution and installing it for the default
91 Python interpreter shipped by Apple. If you are using a fink install,
92 fink will take care of these details for you, by installing IPython
93 against fink's Python.
94
95 IPython offers various forms of support for interacting with graphical
96 applications from the command line, from simple Tk apps (which are in
97 principle always supported by Python) to interactive control of WX, Qt
98 and GTK apps. Under OSX, however, this requires that ipython is
99 installed by calling the special pythonw script at installation time,
100 which takes care of coordinating things with Apple's graphical environment.
101
102 So when installing under OSX, it is best to use the following command::
103
104 $ sudo pythonw setup.py install --install-scripts=/usr/local/bin
105
106 or::
107
108 $ sudo pythonw setup.py install --install-scripts=/usr/bin
109
110 depending on where you like to keep hand-installed executables.
111
112 The resulting script will have an appropriate shebang line (the first
113 line in the script which begins with #!...) such that the ipython
114 interpreter can interact with the OS X GUI. If the installed version
115 does not work and has a shebang line that points to, for example, just
116 /usr/bin/python, then you might have a stale, cached version in your
117 build/scripts-<python-version> directory. Delete that directory and
118 rerun the setup.py.
119
120 It is also a good idea to use the special flag ``--install-scripts`` as
121 indicated above, to ensure that the ipython scripts end up in a location
122 which is part of your $PATH. Otherwise Apple's Python will put the
123 scripts in an internal directory not available by default at the command
124 line (if you use /usr/local/bin, you need to make sure this is in your
125 $PATH, which may not be true by default).
126
127
128 Readline problems
129 -----------------
130
131 By default, the Python version shipped by Apple does not include the
132 readline library, so central to IPython's behavior. If you install
133 IPython against Apple's Python, you will not have arrow keys, tab
134 completion, etc. For Mac OSX 10.3 (Panther), you can find a prebuilt
135 readline library here:
136 http://pythonmac.org/packages/readline-5.0-py2.3-macosx10.3.zip
137
138 If you are using OSX 10.4 (Tiger), after installing this package you
139 need to either:
140
141 1. move readline.so from /Library/Python/2.3 to
142 /Library/Python/2.3/site-packages, or
143 2. install http://pythonmac.org/packages/TigerPython23Compat.pkg.zip
144
145 Users installing against Fink's Python or a properly hand-built one
146 should not have this problem.
147
148
149 DarwinPorts
150 -----------
151
152 I report here a message from an OSX user, who suggests an alternative
153 means of using IPython under this operating system with good results.
154 Please let me know of any updates that may be useful for this section.
155 His message is reproduced verbatim below:
156
157 From: Markus Banfi <markus.banfi-AT-mospheira.net>
158
159 As a MacOS X (10.4.2) user I prefer to install software using
160 DarwinPorts instead of Fink. I had no problems installing ipython
161 with DarwinPorts. It's just:
162
163 sudo port install py-ipython
164
165 It automatically resolved all dependencies (python24, readline,
166 py-readline). So far I did not encounter any problems with the
167 DarwinPorts port of ipython.
168
169
170
171 Windows instructions
172 --------------------
173
174 Some of IPython's very useful features are:
175
176 * Integrated readline support (Tab-based file, object and attribute
177 completion, input history across sessions, editable command line,
178 etc.)
179 * Coloring of prompts, code and tracebacks.
180
181 .. _pyreadline:
182
183 These, by default, are only available under Unix-like operating systems.
184 However, thanks to Gary Bishop's work, Windows XP/2k users can also
185 benefit from them. His readline library originally implemented both GNU
186 readline functionality and color support, so that IPython under Windows
187 XP/2k can be as friendly and powerful as under Unix-like environments.
188
189 This library, now named PyReadline, has been absorbed by the IPython
190 team (Jörgen Stenarson, in particular), and it continues to be developed
191 with new features, as well as being distributed directly from the
192 IPython site.
193
194 The PyReadline extension requires CTypes and the Windows IPython
195 installer needs PyWin32, so in all you need:
196
197 1. PyWin32 from http://sourceforge.net/projects/pywin32.
198 2. PyReadline for Windows from
199 http://ipython.scipy.org/moin/PyReadline/Intro. That page contains
200 further details on using and configuring the system to your liking.
201 3. Finally, only if you are using Python 2.3 or 2.4, you need CTypes
202 from http://starship.python.net/crew/theller/ctypes (you must use
203 version 0.9.1 or newer). This package is included in Python 2.5,
204 so you don't need to manually get it if your Python version is 2.5
205 or newer.
206
207 Warning about a broken readline-like library: several users have
208 reported problems stemming from using the pseudo-readline library at
209 http://newcenturycomputers.net/projects/readline.html. This is a broken
210 library which, while called readline, only implements an incomplete
211 subset of the readline API. Since it is still called readline, it fools
212 IPython's detection mechanisms and causes unpredictable crashes later.
213 If you wish to use IPython under Windows, you must NOT use this library,
214 which for all practical purposes is (at least as of version 1.6) terminally broken.
215
216
217 Installation procedure
218 ----------------------
219
220 Once you have the above installed, from the IPython download directory
221 grab the ipython-XXX.win32.exe file, where XXX represents the version
222 number. This is a regular windows executable installer, which you can
223 simply double-click to install. It will add an entry for IPython to your
224 Start Menu, as well as registering IPython in the Windows list of
225 applications, so you can later uninstall it from the Control Panel.
226
227 IPython tries to install the configuration information in a directory
228 named .ipython (_ipython under Windows) located in your 'home'
229 directory. IPython sets this directory by looking for a HOME environment
230 variable; if such a variable does not exist, it uses HOMEDRIVE\HOMEPATH
231 (these are always defined by Windows). This typically gives something
232 like C:\Documents and Settings\YourUserName, but your local details may
233 vary. In this directory you will find all the files that configure
234 IPython's defaults, and you can put your profiles and extensions there.
235 This directory is automatically added by IPython to sys.path, so
236 anything you place there can be found by import statements.
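
For example, a small module dropped into that directory is importable from any IPython session. The file name ``myextras.py`` below is purely hypothetical::

    # myextras.py -- saved inside your .ipython (or _ipython) directory
    def greet():
        print "imported from the IPython configuration directory"

Starting IPython and typing ``import myextras`` will then find the module without any changes to PYTHONPATH.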
237
238
239 Upgrading
240 ---------
241
242 For an IPython upgrade, you should first uninstall the previous version.
243 This will ensure that all files and directories (such as the
244 documentation) which carry embedded version strings in their names are
245 properly removed.
246
247
248 Manual installation under Win32
249 -------------------------------
250
251 In case the automatic installer does not work for some reason, you can
252 download the ipython-XXX.tar.gz file, which contains the full IPython
253 source distribution (the popular WinZip can read .tar.gz files). After
254 uncompressing the archive, you can install it at a command terminal just
255 like any other Python module, by using 'python setup.py install'.
256
257 After the installation, run the supplied win32_manual_post_install.py
258 script, which creates the necessary Start Menu shortcuts for you.
259
260
261 .. _upgrading:
262
263 Upgrading from a previous version
264 ---------------------------------
265
266 If you are upgrading from a previous version of IPython, you may want
267 to upgrade the contents of your ~/.ipython directory. Just run
268 %upgrade, look at the diffs, and delete the suggested files manually
269 if you no longer need the old versions. %upgrade will never
270 overwrite or delete anything.
271
272
@@ -1,141 +0,0
1 #!python
2 """Windows-specific part of the installation"""
3
4 import os, sys
5
6 try:
7 import shutil,pythoncom
8 from win32com.shell import shell
9 import _winreg as wreg
10 except ImportError:
11 print """
12 You seem to be missing the PythonWin extensions necessary for automatic
13 installation. You can get them (free) from
14 http://starship.python.net/crew/mhammond/
15
16 Please see the manual for details if you want to finish the installation by
17 hand, or get PythonWin and repeat the procedure.
18
19 Press <Enter> to exit this installer."""
20 raw_input()
21 sys.exit()
22
23
24 def make_shortcut(fname,target,args='',start_in='',comment='',icon=None):
25 """Make a Windows shortcut (.lnk) file.
26
27 make_shortcut(fname,target,args='',start_in='',comment='',icon=None)
28
29 Arguments:
30 fname - name of the final shortcut file (include the .lnk)
31 target - what the shortcut will point to
32 args - additional arguments to pass to the target program
33 start_in - directory where the target command will be called
34 comment - for the popup tooltips
35 icon - optional icon file. This must be a tuple of the type
36 (icon_file,index), where index is the index of the icon you want
37 in the file. For single .ico files, index=0, but for icon libraries
38 contained in a single file it can be >0.
39 """
40
41 shortcut = pythoncom.CoCreateInstance(
42 shell.CLSID_ShellLink, None,
43 pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
44 )
45 shortcut.SetPath(target)
46 shortcut.SetArguments(args)
47 shortcut.SetWorkingDirectory(start_in)
48 shortcut.SetDescription(comment)
49 if icon:
50 shortcut.SetIconLocation(*icon)
51 shortcut.QueryInterface(pythoncom.IID_IPersistFile).Save(fname,0)
52
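# Example call (illustrative only -- the paths below are hypothetical; see
# run() further down for the calls this installer actually makes):
#
# make_shortcut(r'C:\Documents and Settings\you\Desktop\Notepad.lnk',
#               r'C:\WINDOWS\notepad.exe',
#               comment='Plain text editor')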
53
54 def run(wait=0):
55 # Find where the Start Menu and My Documents are on the filesystem
56 key = wreg.OpenKey(wreg.HKEY_CURRENT_USER,
57 r'Software\Microsoft\Windows\CurrentVersion'
58 r'\Explorer\Shell Folders')
59
60 programs_dir = wreg.QueryValueEx(key,'Programs')[0]
61 my_documents_dir = wreg.QueryValueEx(key,'Personal')[0]
62 key.Close()
63
64 # Find where the 'program files' directory is
65 key = wreg.OpenKey(wreg.HKEY_LOCAL_MACHINE,
66 r'SOFTWARE\Microsoft\Windows\CurrentVersion')
67
68 program_files_dir = wreg.QueryValueEx(key,'ProgramFilesDir')[0]
69 key.Close()
70
71
72 # File and directory names
73 ip_dir = program_files_dir + r'\IPython'
74 ip_prog_dir = programs_dir + r'\IPython'
75 doc_dir = ip_dir+r'\doc'
76 ip_filename = ip_dir+r'\IPython_shell.py'
77 pycon_icon = doc_dir+r'\pycon.ico'
78
79 if not os.path.isdir(ip_dir):
80 os.mkdir(ip_dir)
81
82 # Copy startup script and documentation
83 shutil.copy(sys.prefix+r'\Scripts\ipython',ip_filename)
84 if os.path.isdir(doc_dir):
85 shutil.rmtree(doc_dir)
86 shutil.copytree('doc',doc_dir)
87
88 # make shortcuts for IPython, html and pdf docs.
89 print 'Making entries for IPython in Start Menu...',
90
91 # Create .bat file in \Scripts
92 fic = open(sys.prefix + r'\Scripts\ipython.bat','w')
93 fic.write('"' + sys.prefix + r'\python.exe' + '" -i ' + '"' +
94 sys.prefix + r'\Scripts\ipython" %*')
95 fic.close()
96
101
102 # Create shortcuts in Programs\IPython:
103 if not os.path.isdir(ip_prog_dir):
104 os.mkdir(ip_prog_dir)
105 os.chdir(ip_prog_dir)
106
107 man_pdf = doc_dir + r'\manual\ipython.pdf'
108 man_htm = doc_dir + r'\manual\ipython.html'
109
110 make_shortcut('IPython.lnk',sys.executable, '"%s"' % ip_filename,
111 my_documents_dir,
112 'IPython - Enhanced python command line interpreter',
113 (pycon_icon,0))
114 make_shortcut('pysh.lnk',sys.executable, '"%s" -p pysh' % ip_filename,
115 my_documents_dir,
116 'pysh - a system shell with Python syntax (IPython based)',
117 (pycon_icon,0))
118 make_shortcut('Manual in HTML format.lnk',man_htm,'','',
119 'IPython Manual - HTML format')
120 make_shortcut('Manual in PDF format.lnk',man_pdf,'','',
121 'IPython Manual - PDF format')
122
123 print """Done.
124
125 I created the directory %s. There you will find the
126 IPython startup script and manuals.
127
128 An IPython menu was also created in your Start Menu, with entries for
129 IPython itself and the manual in HTML and PDF formats.
130
131 For reading PDF documents you need the freely available Adobe Acrobat
132 Reader. If you don't have it, you can download it from:
133 http://www.adobe.com/products/acrobat/readstep2.html
134 """ % ip_dir
135
136 if wait:
137 print "Finished with IPython installation. Press Enter to exit this installer.",
138 raw_input()
139
140 if __name__ == '__main__':
141 run()