moved getdefaultencoding from text to py3compat
Brandon Parsons

The requested changes are too big and content was truncated.

@@ -1,702 +1,702 b''
1 1 """A simple configuration system.
2 2
3 3 Authors
4 4 -------
5 5 * Brian Granger
6 6 * Fernando Perez
7 7 * Min RK
8 8 """
9 9
10 10 #-----------------------------------------------------------------------------
11 11 # Copyright (C) 2008-2011 The IPython Development Team
12 12 #
13 13 # Distributed under the terms of the BSD License. The full license is in
14 14 # the file COPYING, distributed as part of this software.
15 15 #-----------------------------------------------------------------------------
16 16
17 17 #-----------------------------------------------------------------------------
18 18 # Imports
19 19 #-----------------------------------------------------------------------------
20 20
21 21 import __builtin__ as builtin_mod
22 22 import os
23 23 import re
24 24 import sys
25 25
26 26 from IPython.external import argparse
27 27 from IPython.utils.path import filefind, get_ipython_dir
28 28 from IPython.utils import py3compat, text, warn
29 29
30 30 #-----------------------------------------------------------------------------
31 31 # Exceptions
32 32 #-----------------------------------------------------------------------------
33 33
34 34
35 35 class ConfigError(Exception):
36 36 pass
37 37
38 38 class ConfigLoaderError(ConfigError):
39 39 pass
40 40
41 41 class ConfigFileNotFound(ConfigError):
42 42 pass
43 43
44 44 class ArgumentError(ConfigLoaderError):
45 45 pass
46 46
47 47 #-----------------------------------------------------------------------------
48 48 # Argparse fix
49 49 #-----------------------------------------------------------------------------
50 50
51 51 # Unfortunately argparse by default prints help messages to stderr instead of
52 52 # stdout. This makes it annoying to capture long help screens at the command
53 53 # line, since one must know how to pipe stderr, which many users don't know how
54 54 # to do. So we override the print_help method with one that defaults to
55 55 # stdout and use our class instead.
56 56
57 57 class ArgumentParser(argparse.ArgumentParser):
58 58 """Simple argparse subclass that prints help to stdout by default."""
59 59
60 60 def print_help(self, file=None):
61 61 if file is None:
62 62 file = sys.stdout
63 63 return super(ArgumentParser, self).print_help(file)
64 64
65 65 print_help.__doc__ = argparse.ArgumentParser.print_help.__doc__
66 66
67 67 #-----------------------------------------------------------------------------
68 68 # Config class for holding config information
69 69 #-----------------------------------------------------------------------------
70 70
71 71
72 72 class Config(dict):
73 73 """An attribute based dict that can do smart merges."""
74 74
75 75 def __init__(self, *args, **kwds):
76 76 dict.__init__(self, *args, **kwds)
77 77 # This sets self.__dict__ = self, but it has to be done this way
78 78 # because we are also overriding __setattr__.
79 79 dict.__setattr__(self, '__dict__', self)
80 80
81 81 def _merge(self, other):
82 82 to_update = {}
83 83 for k, v in other.iteritems():
84 84 if not self.has_key(k):
85 85 to_update[k] = v
86 86 else: # I have this key
87 87 if isinstance(v, Config):
88 88 # Recursively merge common sub Configs
89 89 self[k]._merge(v)
90 90 else:
91 91 # Plain updates for non-Configs
92 92 to_update[k] = v
93 93
94 94 self.update(to_update)
95 95
96 96 def _is_section_key(self, key):
97 97 if key[0].upper()==key[0] and not key.startswith('_'):
98 98 return True
99 99 else:
100 100 return False
101 101
102 102 def __contains__(self, key):
103 103 if self._is_section_key(key):
104 104 return True
105 105 else:
106 106 return super(Config, self).__contains__(key)
107 107 # .has_key is deprecated for dictionaries.
108 108 has_key = __contains__
109 109
110 110 def _has_section(self, key):
111 111 if self._is_section_key(key):
112 112 if super(Config, self).__contains__(key):
113 113 return True
114 114 return False
115 115
116 116 def copy(self):
117 117 return type(self)(dict.copy(self))
118 118
119 119 def __copy__(self):
120 120 return self.copy()
121 121
122 122 def __deepcopy__(self, memo):
123 123 import copy
124 124 return type(self)(copy.deepcopy(self.items()))
125 125
126 126 def __getitem__(self, key):
127 127 # We cannot use directly self._is_section_key, because it triggers
128 128 # infinite recursion on top of PyPy. Instead, we manually fish the
129 129 # bound method.
130 130 is_section_key = self.__class__._is_section_key.__get__(self)
131 131
132 132 # Because we use this for an exec namespace, we need to delegate
133 133 # the lookup of names in __builtin__ to itself. This means
134 134 # that you can't have section or attribute names that are
135 135 # builtins.
136 136 try:
137 137 return getattr(builtin_mod, key)
138 138 except AttributeError:
139 139 pass
140 140 if is_section_key(key):
141 141 try:
142 142 return dict.__getitem__(self, key)
143 143 except KeyError:
144 144 c = Config()
145 145 dict.__setitem__(self, key, c)
146 146 return c
147 147 else:
148 148 return dict.__getitem__(self, key)
149 149
150 150 def __setitem__(self, key, value):
151 151 # Don't allow names in __builtin__ to be modified.
152 152 if hasattr(builtin_mod, key):
153 153 raise ConfigError('Config variable names cannot have the same name '
154 154 'as a Python builtin: %s' % key)
155 155 if self._is_section_key(key):
156 156 if not isinstance(value, Config):
157 157 raise ValueError('values whose keys begin with an uppercase '
158 158 'char must be Config instances: %r, %r' % (key, value))
159 159 else:
160 160 dict.__setitem__(self, key, value)
161 161
162 162 def __getattr__(self, key):
163 163 try:
164 164 return self.__getitem__(key)
165 165 except KeyError, e:
166 166 raise AttributeError(e)
167 167
168 168 def __setattr__(self, key, value):
169 169 try:
170 170 self.__setitem__(key, value)
171 171 except KeyError, e:
172 172 raise AttributeError(e)
173 173
174 174 def __delattr__(self, key):
175 175 try:
176 176 dict.__delitem__(self, key)
177 177 except KeyError, e:
178 178 raise AttributeError(e)
179 179
180 180
181 181 #-----------------------------------------------------------------------------
182 182 # Config loading classes
183 183 #-----------------------------------------------------------------------------
184 184
185 185
186 186 class ConfigLoader(object):
187 187 """An object for loading configurations from just about anywhere.
188 188
189 189 The resulting configuration is packaged as a :class:`Struct`.
190 190
191 191 Notes
192 192 -----
193 193 A :class:`ConfigLoader` does one thing: load a config from a source
194 194 (file, command line arguments) and returns the data as a :class:`Struct`.
195 195 There are lots of things that :class:`ConfigLoader` does not do. It does
196 196 not implement complex logic for finding config files. It does not handle
197 197 default values or merge multiple configs. These things need to be
198 198 handled elsewhere.
199 199 """
200 200
201 201 def __init__(self):
202 202 """A base class for config loaders.
203 203
204 204 Examples
205 205 --------
206 206
207 207 >>> cl = ConfigLoader()
208 208 >>> config = cl.load_config()
209 209 >>> config
210 210 {}
211 211 """
212 212 self.clear()
213 213
214 214 def clear(self):
215 215 self.config = Config()
216 216
217 217 def load_config(self):
218 218 """Load a config from somewhere, return a :class:`Config` instance.
219 219
220 220 Usually, this will cause self.config to be set and then returned.
221 221 However, in most cases, :meth:`ConfigLoader.clear` should be called
222 222 to erase any previous state.
223 223 """
224 224 self.clear()
225 225 return self.config
226 226
227 227
228 228 class FileConfigLoader(ConfigLoader):
229 229 """A base class for file based configurations.
230 230
231 231 As we add more file based config loaders, the common logic should go
232 232 here.
233 233 """
234 234 pass
235 235
236 236
237 237 class PyFileConfigLoader(FileConfigLoader):
238 238 """A config loader for pure python files.
239 239
240 240 This calls execfile on a plain python file and looks for attributes
241 241 that are all caps. These attributes are added to the config Struct.
242 242 """
243 243
244 244 def __init__(self, filename, path=None):
245 245 """Build a config loader for a filename and path.
246 246
247 247 Parameters
248 248 ----------
249 249 filename : str
250 250 The file name of the config file.
251 251 path : str, list, tuple
252 252 The path to search for the config file on, or a sequence of
253 253 paths to try in order.
254 254 """
255 255 super(PyFileConfigLoader, self).__init__()
256 256 self.filename = filename
257 257 self.path = path
258 258 self.full_filename = ''
259 259 self.data = None
260 260
261 261 def load_config(self):
262 262 """Load the config from a file and return it as a Struct."""
263 263 self.clear()
264 264 try:
265 265 self._find_file()
266 266 except IOError as e:
267 267 raise ConfigFileNotFound(str(e))
268 268 self._read_file_as_dict()
269 269 self._convert_to_config()
270 270 return self.config
271 271
272 272 def _find_file(self):
273 273 """Try to find the file by searching the paths."""
274 274 self.full_filename = filefind(self.filename, self.path)
275 275
276 276 def _read_file_as_dict(self):
277 277 """Load the config file into self.config, with recursive loading."""
278 278 # This closure is made available in the namespace that is used
279 279 # to exec the config file. It allows users to call
280 280 # load_subconfig('myconfig.py') to load config files recursively.
281 281 # It needs to be a closure because it has references to self.path
282 282 # and self.config. The sub-config is loaded with the same path
283 283 # as the parent, but it uses an empty config which is then merged
284 284 # with the parents.
285 285
286 286 # If a profile is specified, the config file will be loaded
287 287 # from that profile
288 288
289 289 def load_subconfig(fname, profile=None):
290 290 # import here to prevent circular imports
291 291 from IPython.core.profiledir import ProfileDir, ProfileDirError
292 292 if profile is not None:
293 293 try:
294 294 profile_dir = ProfileDir.find_profile_dir_by_name(
295 295 get_ipython_dir(),
296 296 profile,
297 297 )
298 298 except ProfileDirError:
299 299 return
300 300 path = profile_dir.location
301 301 else:
302 302 path = self.path
303 303 loader = PyFileConfigLoader(fname, path)
304 304 try:
305 305 sub_config = loader.load_config()
306 306 except ConfigFileNotFound:
307 307 # Pass silently if the sub config is not there. This happens
308 308 # when a user is using a profile, but not the default config.
309 309 pass
310 310 else:
311 311 self.config._merge(sub_config)
312 312
313 313 # Again, this needs to be a closure and should be used in config
314 314 # files to get the config being loaded.
315 315 def get_config():
316 316 return self.config
317 317
318 318 namespace = dict(load_subconfig=load_subconfig, get_config=get_config)
319 319 fs_encoding = sys.getfilesystemencoding() or 'ascii'
320 320 conf_filename = self.full_filename.encode(fs_encoding)
321 321 py3compat.execfile(conf_filename, namespace)
322 322
323 323 def _convert_to_config(self):
324 324 if self.data is None:
325 325 ConfigLoaderError('self.data does not exist')
326 326
327 327
328 328 class CommandLineConfigLoader(ConfigLoader):
329 329 """A config loader for command line arguments.
330 330
331 331 As we add more command line based loaders, the common logic should go
332 332 here.
333 333 """
334 334
335 335 def _exec_config_str(self, lhs, rhs):
336 336 """execute self.config.<lhs>=<rhs>
337 337
338 338 * expands ~ with expanduser
339 339 * tries to assign with raw exec, otherwise assigns with just the string,
340 340 allowing `--C.a=foobar` and `--C.a="foobar"` to be equivalent. *Not*
341 341 equivalent are `--C.a=4` and `--C.a='4'`.
342 342 """
343 343 rhs = os.path.expanduser(rhs)
344 344 exec_str = 'self.config.' + lhs + '=' + rhs
345 345 try:
346 346 # Try to see if regular Python syntax will work. This
347 347 # won't handle strings as the quote marks are removed
348 348 # by the system shell.
349 349 exec exec_str in locals(), globals()
350 350 except (NameError, SyntaxError):
351 351 # This case happens if the rhs is a string but without
352 352 # the quote marks. Use repr, to get quote marks, and
353 353 # 'u' prefix and see if
354 354 # it succeeds. If it still fails, we let it raise.
355 355 exec_str = u'self.config.' + lhs + '= rhs'
356 356 exec exec_str in locals(), globals()
357 357
358 358 def _load_flag(self, cfg):
359 359 """update self.config from a flag, which can be a dict or Config"""
360 360 if isinstance(cfg, (dict, Config)):
361 361 # don't clobber whole config sections, update
362 362 # each section from config:
363 363 for sec,c in cfg.iteritems():
364 364 self.config[sec].update(c)
365 365 else:
366 366 raise TypeError("Invalid flag: %r" % cfg)
367 367
368 368 # raw --identifier=value pattern
369 369 # but *also* accept '-' as wordsep, for aliases
370 370 # accepts: --foo=a
371 371 # --Class.trait=value
372 372 # --alias-name=value
373 373 # rejects: -foo=value
374 374 # --foo
375 375 # --Class.trait
376 376 kv_pattern = re.compile(r'\-\-[A-Za-z][\w\-]*(\.[\w\-]+)*\=.*')
377 377
378 378 # just flags, no assignments, with two *or one* leading '-'
379 379 # accepts: --foo
380 380 # -foo-bar-again
381 381 # rejects: --anything=anything
382 382 # --two.word
383 383
384 384 flag_pattern = re.compile(r'\-\-?\w+[\-\w]*$')
385 385
386 386 class KeyValueConfigLoader(CommandLineConfigLoader):
387 387 """A config loader that loads key value pairs from the command line.
388 388
389 389 This allows command line options to be given in the following form::
390 390
391 391 ipython --profile="foo" --InteractiveShell.autocall=False
392 392 """
393 393
394 394 def __init__(self, argv=None, aliases=None, flags=None):
395 395 """Create a key value pair config loader.
396 396
397 397 Parameters
398 398 ----------
399 399 argv : list
400 400 A list that has the form of sys.argv[1:] which has unicode
401 401 elements of the form u"key=value". If this is None (default),
402 402 then sys.argv[1:] will be used.
403 403 aliases : dict
404 404 A dict of aliases for configurable traits.
405 405 Keys are the short aliases, Values are the resolved trait.
406 406 Of the form: `{'alias' : 'Configurable.trait'}`
407 407 flags : dict
408 408 A dict of flags, keyed by str name. Values can be Config objects,
409 409 dicts, or "key=value" strings. If Config or dict, when the flag
410 410 is triggered, the flag is loaded as `self.config.update(m)`.
411 411
412 412 Returns
413 413 -------
414 414 config : Config
415 415 The resulting Config object.
416 416
417 417 Examples
418 418 --------
419 419
420 420 >>> from IPython.config.loader import KeyValueConfigLoader
421 421 >>> cl = KeyValueConfigLoader()
422 422 >>> cl.load_config(["--A.name='brian'","--B.number=0"])
423 423 {'A': {'name': 'brian'}, 'B': {'number': 0}}
424 424 """
425 425 self.clear()
426 426 if argv is None:
427 427 argv = sys.argv[1:]
428 428 self.argv = argv
429 429 self.aliases = aliases or {}
430 430 self.flags = flags or {}
431 431
432 432
433 433 def clear(self):
434 434 super(KeyValueConfigLoader, self).clear()
435 435 self.extra_args = []
436 436
437 437
438 438 def _decode_argv(self, argv, enc=None):
439 439 """decode argv if bytes, using stdin.encoding, falling back on default enc"""
440 440 uargv = []
441 441 if enc is None:
442 enc = text.getdefaultencoding()
442 enc = py3compat.getdefaultencoding()
443 443 for arg in argv:
444 444 if not isinstance(arg, unicode):
445 445 # only decode if not already decoded
446 446 arg = arg.decode(enc)
447 447 uargv.append(arg)
448 448 return uargv
449 449
450 450
451 451 def load_config(self, argv=None, aliases=None, flags=None):
452 452 """Parse the configuration and generate the Config object.
453 453
454 454 After loading, any arguments that are not key-value or
455 455 flags will be stored in self.extra_args - a list of
456 456 unparsed command-line arguments. This is used for
457 457 arguments such as input files or subcommands.
458 458
459 459 Parameters
460 460 ----------
461 461 argv : list, optional
462 462 A list that has the form of sys.argv[1:] which has unicode
463 463 elements of the form u"key=value". If this is None (default),
464 464 then self.argv will be used.
465 465 aliases : dict
466 466 A dict of aliases for configurable traits.
467 467 Keys are the short aliases, Values are the resolved trait.
468 468 Of the form: `{'alias' : 'Configurable.trait'}`
469 469 flags : dict
470 470 A dict of flags, keyed by str name. Values can be Config objects
471 471 or dicts. When the flag is triggered, the config is loaded as
472 472 `self.config.update(cfg)`.
473 473 """
474 474 from IPython.config.configurable import Configurable
475 475
476 476 self.clear()
477 477 if argv is None:
478 478 argv = self.argv
479 479 if aliases is None:
480 480 aliases = self.aliases
481 481 if flags is None:
482 482 flags = self.flags
483 483
484 484 # ensure argv is a list of unicode strings:
485 485 uargv = self._decode_argv(argv)
486 486 for idx,raw in enumerate(uargv):
487 487 # strip leading '-'
488 488 item = raw.lstrip('-')
489 489
490 490 if raw == '--':
491 491 # don't parse arguments after '--'
492 492 # this is useful for relaying arguments to scripts, e.g.
493 493 # ipython -i foo.py --pylab=qt -- args after '--' go-to-foo.py
494 494 self.extra_args.extend(uargv[idx+1:])
495 495 break
496 496
497 497 if kv_pattern.match(raw):
498 498 lhs,rhs = item.split('=',1)
499 499 # Substitute longnames for aliases.
500 500 if lhs in aliases:
501 501 lhs = aliases[lhs]
502 502 if '.' not in lhs:
503 503 # probably a mistyped alias, but not technically illegal
504 504 warn.warn("Unrecognized alias: '%s', it will probably have no effect."%lhs)
505 505 try:
506 506 self._exec_config_str(lhs, rhs)
507 507 except Exception:
508 508 raise ArgumentError("Invalid argument: '%s'" % raw)
509 509
510 510 elif flag_pattern.match(raw):
511 511 if item in flags:
512 512 cfg,help = flags[item]
513 513 self._load_flag(cfg)
514 514 else:
515 515 raise ArgumentError("Unrecognized flag: '%s'"%raw)
516 516 elif raw.startswith('-'):
517 517 kv = '--'+item
518 518 if kv_pattern.match(kv):
519 519 raise ArgumentError("Invalid argument: '%s', did you mean '%s'?"%(raw, kv))
520 520 else:
521 521 raise ArgumentError("Invalid argument: '%s'"%raw)
522 522 else:
523 523 # keep all args that aren't valid in a list,
524 524 # in case our parent knows what to do with them.
525 525 self.extra_args.append(item)
526 526 return self.config
527 527
528 528 class ArgParseConfigLoader(CommandLineConfigLoader):
529 529 """A loader that uses the argparse module to load from the command line."""
530 530
531 531 def __init__(self, argv=None, aliases=None, flags=None, *parser_args, **parser_kw):
532 532 """Create a config loader for use with argparse.
533 533
534 534 Parameters
535 535 ----------
536 536
537 537 argv : optional, list
538 538 If given, used to read command-line arguments from, otherwise
539 539 sys.argv[1:] is used.
540 540
541 541 parser_args : tuple
542 542 A tuple of positional arguments that will be passed to the
543 543 constructor of :class:`argparse.ArgumentParser`.
544 544
545 545 parser_kw : dict
546 546 A dict of keyword arguments that will be passed to the
547 547 constructor of :class:`argparse.ArgumentParser`.
548 548
549 549 Returns
550 550 -------
551 551 config : Config
552 552 The resulting Config object.
553 553 """
554 554 super(CommandLineConfigLoader, self).__init__()
555 555 self.clear()
556 556 if argv is None:
557 557 argv = sys.argv[1:]
558 558 self.argv = argv
559 559 self.aliases = aliases or {}
560 560 self.flags = flags or {}
561 561
562 562 self.parser_args = parser_args
563 563 self.version = parser_kw.pop("version", None)
564 564 kwargs = dict(argument_default=argparse.SUPPRESS)
565 565 kwargs.update(parser_kw)
566 566 self.parser_kw = kwargs
567 567
568 568 def load_config(self, argv=None, aliases=None, flags=None):
569 569 """Parse command line arguments and return as a Config object.
570 570
571 571 Parameters
572 572 ----------
573 573
574 574 args : optional, list
575 575 If given, a list with the structure of sys.argv[1:] to parse
576 576 arguments from. If not given, the instance's self.argv attribute
577 577 (given at construction time) is used."""
578 578 self.clear()
579 579 if argv is None:
580 580 argv = self.argv
581 581 if aliases is None:
582 582 aliases = self.aliases
583 583 if flags is None:
584 584 flags = self.flags
585 585 self._create_parser(aliases, flags)
586 586 self._parse_args(argv)
587 587 self._convert_to_config()
588 588 return self.config
589 589
590 590 def get_extra_args(self):
591 591 if hasattr(self, 'extra_args'):
592 592 return self.extra_args
593 593 else:
594 594 return []
595 595
596 596 def _create_parser(self, aliases=None, flags=None):
597 597 self.parser = ArgumentParser(*self.parser_args, **self.parser_kw)
598 598 self._add_arguments(aliases, flags)
599 599
600 600 def _add_arguments(self, aliases=None, flags=None):
601 601 raise NotImplementedError("subclasses must implement _add_arguments")
602 602
603 603 def _parse_args(self, args):
604 604 """self.parser->self.parsed_data"""
605 605 # decode sys.argv to support unicode command-line options
606 enc = text.getdefaultencoding()
606 enc = py3compat.getdefaultencoding()
607 607 uargs = [py3compat.cast_unicode(a, enc) for a in args]
608 608 self.parsed_data, self.extra_args = self.parser.parse_known_args(uargs)
609 609
610 610 def _convert_to_config(self):
611 611 """self.parsed_data->self.config"""
612 612 for k, v in vars(self.parsed_data).iteritems():
613 613 exec "self.config.%s = v"%k in locals(), globals()
614 614
615 615 class KVArgParseConfigLoader(ArgParseConfigLoader):
616 616 """A config loader that loads aliases and flags with argparse,
617 617 but will use KVLoader for the rest. This allows better parsing
618 618 of common args, such as `ipython -c 'print 5'`, but still gets
619 619 arbitrary config with `ipython --InteractiveShell.use_readline=False`"""
620 620
621 621 def _convert_to_config(self):
622 622 """self.parsed_data->self.config"""
623 623 for k, v in vars(self.parsed_data).iteritems():
624 624 self._exec_config_str(k, v)
625 625
626 626 def _add_arguments(self, aliases=None, flags=None):
627 627 self.alias_flags = {}
628 628 # print aliases, flags
629 629 if aliases is None:
630 630 aliases = self.aliases
631 631 if flags is None:
632 632 flags = self.flags
633 633 paa = self.parser.add_argument
634 634 for key,value in aliases.iteritems():
635 635 if key in flags:
636 636 # flags
637 637 nargs = '?'
638 638 else:
639 639 nargs = None
640 640 if len(key) is 1:
641 641 paa('-'+key, '--'+key, type=unicode, dest=value, nargs=nargs)
642 642 else:
643 643 paa('--'+key, type=unicode, dest=value, nargs=nargs)
644 644 for key, (value, help) in flags.iteritems():
645 645 if key in self.aliases:
646 646 #
647 647 self.alias_flags[self.aliases[key]] = value
648 648 continue
649 649 if len(key) is 1:
650 650 paa('-'+key, '--'+key, action='append_const', dest='_flags', const=value)
651 651 else:
652 652 paa('--'+key, action='append_const', dest='_flags', const=value)
653 653
654 654 def _convert_to_config(self):
655 655 """self.parsed_data->self.config, parse unrecognized extra args via KVLoader."""
656 656 # remove subconfigs list from namespace before transforming the Namespace
657 657 if '_flags' in self.parsed_data:
658 658 subcs = self.parsed_data._flags
659 659 del self.parsed_data._flags
660 660 else:
661 661 subcs = []
662 662
663 663 for k, v in vars(self.parsed_data).iteritems():
664 664 if v is None:
665 665 # it was a flag that shares the name of an alias
666 666 subcs.append(self.alias_flags[k])
667 667 else:
668 668 # eval the KV assignment
669 669 self._exec_config_str(k, v)
670 670
671 671 for subc in subcs:
672 672 self._load_flag(subc)
673 673
674 674 if self.extra_args:
675 675 sub_parser = KeyValueConfigLoader()
676 676 sub_parser.load_config(self.extra_args)
677 677 self.config._merge(sub_parser.config)
678 678 self.extra_args = sub_parser.extra_args
679 679
680 680
681 681 def load_pyconfig_files(config_files, path):
682 682 """Load multiple Python config files, merging each of them in turn.
683 683
684 684 Parameters
685 685 ==========
686 686 config_files : list of str
687 687 List of config files names to load and merge into the config.
688 688 path : unicode
689 689 The full path to the location of the config files.
690 690 """
691 691 config = Config()
692 692 for cf in config_files:
693 693 loader = PyFileConfigLoader(cf, path=path)
694 694 try:
695 695 next_config = loader.load_config()
696 696 except ConfigFileNotFound:
697 697 pass
698 698 except:
699 699 raise
700 700 else:
701 701 config._merge(next_config)
702 702 return config
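
In the config loader (`IPython.config.loader`) the change is mechanical: `_decode_argv` and `_parse_args` now read the default encoding from `py3compat` instead of `text`. A minimal sketch of the call pattern, mirroring the `KeyValueConfigLoader` doctest above (the argument values are illustrative)::

    from IPython.config.loader import KeyValueConfigLoader
    from IPython.utils import py3compat

    # _decode_argv now asks py3compat (not text) for the default encoding.
    enc = py3compat.getdefaultencoding()
    argv = [a if isinstance(a, unicode) else a.decode(enc)
            for a in ["--A.name='brian'", "--B.number=0"]]

    cl = KeyValueConfigLoader()
    config = cl.load_config(argv)
    # config.A.name == 'brian' and config.B.number == 0, as in the doctest.
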
@@ -1,59 +1,59 b''
1 1 """Support for interactive macros in IPython"""
2 2
3 3 #*****************************************************************************
4 4 # Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu>
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING, distributed as part of this software.
8 8 #*****************************************************************************
9 9
10 10 import re
11 11 import sys
12 12
13 13 from IPython.utils import py3compat
14 14
15 15 coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)")
16 16
17 17 class Macro(object):
18 18 """Simple class to store the value of macros as strings.
19 19
20 20 Macro is just a callable that executes a string of IPython
21 21 input when called.
22 22
23 23 Args to macro are available in _margv list if you need them.
24 24 """
25 25
26 26 def __init__(self,code):
27 27 """store the macro value, as a single string which can be executed"""
28 28 lines = []
29 29 enc = None
30 30 for line in code.splitlines():
31 31 coding_match = coding_declaration.match(line)
32 32 if coding_match:
33 33 enc = coding_match.group(1)
34 34 else:
35 35 lines.append(line)
36 36 code = "\n".join(lines)
37 37 if isinstance(code, bytes):
38 code = code.decode(enc or sys.getdefaultencoding())
38 code = code.decode(enc or py3compat.getdefaultencoding())
39 39 self.value = code + '\n'
40 40
41 41 def __str__(self):
42 42 return py3compat.unicode_to_str(self.value)
43 43
44 44 def __unicode__(self):
45 45 return self.value
46 46
47 47 def __repr__(self):
48 48 return 'IPython.macro.Macro(%s)' % repr(self.value)
49 49
50 50 def __getstate__(self):
51 51 """ needed for safe pickling via %store """
52 52 return {'value': self.value}
53 53
54 54 def __add__(self, other):
55 55 if isinstance(other, Macro):
56 56 return Macro(self.value + other.value)
57 57 elif isinstance(other, basestring):
58 58 return Macro(self.value + other)
59 59 raise TypeError
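
In the macro module, the fallback used when a byte-string macro body carries no coding declaration moves from `sys.getdefaultencoding()` to `py3compat.getdefaultencoding()`. A small sketch of both paths (the import path and macro bodies are assumptions for illustration)::

    from IPython.core.macro import Macro

    # With a coding declaration: the declaration line is stripped and its
    # encoding is used to decode the rest of the source.
    m = Macro(b"# coding: utf-8\nprint 'caf\xc3\xa9'\n")

    # Without one, the body is decoded with py3compat.getdefaultencoding().
    m2 = Macro(b"x = 1\n")
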
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,75 +1,75 b''
1 1 # coding: utf-8
2 2 """Tests for the compilerop module.
3 3 """
4 4 #-----------------------------------------------------------------------------
5 5 # Copyright (C) 2010-2011 The IPython Development Team.
6 6 #
7 7 # Distributed under the terms of the BSD License.
8 8 #
9 9 # The full license is in the file COPYING.txt, distributed with this software.
10 10 #-----------------------------------------------------------------------------
11 11
12 12 #-----------------------------------------------------------------------------
13 13 # Imports
14 14 #-----------------------------------------------------------------------------
15 15 from __future__ import print_function
16 16
17 17 # Stdlib imports
18 18 import linecache
19 19 import sys
20 20
21 21 # Third-party imports
22 22 import nose.tools as nt
23 23
24 24 # Our own imports
25 25 from IPython.core import compilerop
26 26 from IPython.utils import py3compat
27 27
28 28 #-----------------------------------------------------------------------------
29 29 # Test functions
30 30 #-----------------------------------------------------------------------------
31 31
32 32 def test_code_name():
33 33 code = 'x=1'
34 34 name = compilerop.code_name(code)
35 35 nt.assert_true(name.startswith('<ipython-input-0'))
36 36
37 37
38 38 def test_code_name2():
39 39 code = 'x=1'
40 40 name = compilerop.code_name(code, 9)
41 41 nt.assert_true(name.startswith('<ipython-input-9'))
42 42
43 43
44 44 def test_cache():
45 45 """Test the compiler correctly compiles and caches inputs
46 46 """
47 47 cp = compilerop.CachingCompiler()
48 48 ncache = len(linecache.cache)
49 49 cp.cache('x=1')
50 50 nt.assert_true(len(linecache.cache) > ncache)
51 51
52 52 def setUp():
53 53 # Check we're in a proper Python 2 environment (some imports, such
54 54 # as GTK, can change the default encoding, which can hide bugs.)
55 nt.assert_equal(sys.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
55 nt.assert_equal(py3compat.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
56 56
57 57 def test_cache_unicode():
58 58 cp = compilerop.CachingCompiler()
59 59 ncache = len(linecache.cache)
60 60 cp.cache(u"t = 'žćčőđ'")
61 61 nt.assert_true(len(linecache.cache) > ncache)
62 62
63 63 def test_compiler_check_cache():
64 64 """Test the compiler properly manages the cache.
65 65 """
66 66 # Rather simple-minded tests that just exercise the API
67 67 cp = compilerop.CachingCompiler()
68 68 cp.cache('x=1', 99)
69 69 # Ensure now that after clearing the cache, our entries survive
70 70 cp.check_cache()
71 71 for k in linecache.cache:
72 72 if k.startswith('<ipython-input-99'):
73 73 break
74 74 else:
75 75 raise AssertionError('Entry for input-99 missing from linecache')
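
The only functional change in this test module is the `setUp` guard, which now reads the default encoding through `py3compat` as well. A minimal sketch of the invariant it enforces (not part of the diff)::

    from IPython.utils import py3compat

    # utf-8 is the interpreter default on Python 3; on Python 2 it should
    # still be ascii unless something (e.g. a GTK import) has changed it.
    expected = "utf-8" if py3compat.PY3 else "ascii"
    assert py3compat.getdefaultencoding() == expected
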
@@ -1,151 +1,151 b''
1 1 # coding: utf-8
2 2 """Tests for the IPython tab-completion machinery.
3 3 """
4 4 #-----------------------------------------------------------------------------
5 5 # Module imports
6 6 #-----------------------------------------------------------------------------
7 7
8 8 # stdlib
9 9 import os
10 10 import shutil
11 11 import sys
12 12 import tempfile
13 13 import unittest
14 14 from datetime import datetime
15 15
16 16 # third party
17 17 import nose.tools as nt
18 18
19 19 # our own packages
20 20 from IPython.config.loader import Config
21 21 from IPython.utils.tempdir import TemporaryDirectory
22 22 from IPython.core.history import HistoryManager, extract_hist_ranges
23 23 from IPython.utils import py3compat
24 24
25 25 def setUp():
26 nt.assert_equal(sys.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
26 nt.assert_equal(py3compat.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
27 27
28 28 def test_history():
29 29 ip = get_ipython()
30 30 with TemporaryDirectory() as tmpdir:
31 31 hist_manager_ori = ip.history_manager
32 32 hist_file = os.path.join(tmpdir, 'history.sqlite')
33 33 try:
34 34 ip.history_manager = HistoryManager(shell=ip, hist_file=hist_file)
35 35 hist = [u'a=1', u'def f():\n test = 1\n return test', u"b='β‚¬Γ†ΒΎΓ·ΓŸ'"]
36 36 for i, h in enumerate(hist, start=1):
37 37 ip.history_manager.store_inputs(i, h)
38 38
39 39 ip.history_manager.db_log_output = True
40 40 # Doesn't match the input, but we'll just check it's stored.
41 41 ip.history_manager.output_hist_reprs[3] = "spam"
42 42 ip.history_manager.store_output(3)
43 43
44 44 nt.assert_equal(ip.history_manager.input_hist_raw, [''] + hist)
45 45
46 46 # Detailed tests for _get_range_session
47 47 grs = ip.history_manager._get_range_session
48 48 nt.assert_equal(list(grs(start=2,stop=-1)), zip([0], [2], hist[1:-1]))
49 49 nt.assert_equal(list(grs(start=-2)), zip([0,0], [2,3], hist[-2:]))
50 50 nt.assert_equal(list(grs(output=True)), zip([0,0,0], [1,2,3], zip(hist, [None,None,'spam'])))
51 51
52 52 # Check whether specifying a range beyond the end of the current
53 53 # session results in an error (gh-804)
54 54 ip.magic('%hist 2-500')
55 55
56 56 # Check that we can write non-ascii characters to a file
57 57 ip.magic("%%hist -f %s" % os.path.join(tmpdir, "test1"))
58 58 ip.magic("%%hist -pf %s" % os.path.join(tmpdir, "test2"))
59 59 ip.magic("%%hist -nf %s" % os.path.join(tmpdir, "test3"))
60 60 ip.magic("%%save %s 1-10" % os.path.join(tmpdir, "test4"))
61 61
62 62 # New session
63 63 ip.history_manager.reset()
64 64 newcmds = ["z=5","class X(object):\n pass", "k='p'"]
65 65 for i, cmd in enumerate(newcmds, start=1):
66 66 ip.history_manager.store_inputs(i, cmd)
67 67 gothist = ip.history_manager.get_range(start=1, stop=4)
68 68 nt.assert_equal(list(gothist), zip([0,0,0],[1,2,3], newcmds))
69 69 # Previous session:
70 70 gothist = ip.history_manager.get_range(-1, 1, 4)
71 71 nt.assert_equal(list(gothist), zip([1,1,1],[1,2,3], hist))
72 72
73 73 # Check get_hist_tail
74 74 gothist = ip.history_manager.get_tail(4, output=True,
75 75 include_latest=True)
76 76 expected = [(1, 3, (hist[-1], "spam")),
77 77 (2, 1, (newcmds[0], None)),
78 78 (2, 2, (newcmds[1], None)),
79 79 (2, 3, (newcmds[2], None)),]
80 80 nt.assert_equal(list(gothist), expected)
81 81
82 82 gothist = ip.history_manager.get_tail(2)
83 83 expected = [(2, 1, newcmds[0]),
84 84 (2, 2, newcmds[1])]
85 85 nt.assert_equal(list(gothist), expected)
86 86
87 87 # Check get_hist_search
88 88 gothist = ip.history_manager.search("*test*")
89 89 nt.assert_equal(list(gothist), [(1,2,hist[1])] )
90 90 gothist = ip.history_manager.search("b*", output=True)
91 91 nt.assert_equal(list(gothist), [(1,3,(hist[2],"spam"))] )
92 92
93 93 # Cross testing: check that magic %save can get previous session.
94 94 testfilename = os.path.realpath(os.path.join(tmpdir, "test.py"))
95 95 ip.magic_save(testfilename + " ~1/1-3")
96 96 with py3compat.open(testfilename) as testfile:
97 97 nt.assert_equal(testfile.read(),
98 98 u"# coding: utf-8\n" + u"\n".join(hist))
99 99
100 100 # Duplicate line numbers - check that it doesn't crash, and
101 101 # gets a new session
102 102 ip.history_manager.store_inputs(1, "rogue")
103 103 ip.history_manager.writeout_cache()
104 104 nt.assert_equal(ip.history_manager.session_number, 3)
105 105 finally:
106 106 # Restore history manager
107 107 ip.history_manager = hist_manager_ori
108 108
109 109
110 110 def test_extract_hist_ranges():
111 111 instr = "1 2/3 ~4/5-6 ~4/7-~4/9 ~9/2-~7/5"
112 112 expected = [(0, 1, 2), # 0 == current session
113 113 (2, 3, 4),
114 114 (-4, 5, 7),
115 115 (-4, 7, 10),
116 116 (-9, 2, None), # None == to end
117 117 (-8, 1, None),
118 118 (-7, 1, 6)]
119 119 actual = list(extract_hist_ranges(instr))
120 120 nt.assert_equal(actual, expected)
121 121
122 122 def test_magic_rerun():
123 123 """Simple test for %rerun (no args -> rerun last line)"""
124 124 ip = get_ipython()
125 125 ip.run_cell("a = 10", store_history=True)
126 126 ip.run_cell("a += 1", store_history=True)
127 127 nt.assert_equal(ip.user_ns["a"], 11)
128 128 ip.run_cell("%rerun", store_history=True)
129 129 nt.assert_equal(ip.user_ns["a"], 12)
130 130
131 131 def test_timestamp_type():
132 132 ip = get_ipython()
133 133 info = ip.history_manager.get_session_info()
134 134 nt.assert_true(isinstance(info[1], datetime))
135 135
136 136 def test_hist_file_config():
137 137 cfg = Config()
138 138 tfile = tempfile.NamedTemporaryFile(delete=False)
139 139 cfg.HistoryManager.hist_file = tfile.name
140 140 try:
141 141 hm = HistoryManager(shell=get_ipython(), config=cfg)
142 142 nt.assert_equals(hm.hist_file, cfg.HistoryManager.hist_file)
143 143 finally:
144 144 try:
145 145 os.remove(tfile.name)
146 146 except OSError:
147 147 # same catch as in testing.tools.TempFileMixin
148 148 # On Windows, even though we close the file, we still can't
149 149 # delete it. I have no clue why
150 150 pass
151 151
1 NO CONTENT: modified file
The requested commit or file is too big and content was truncated.
@@ -1,391 +1,391 b''
1 1 """Generic testing tools.
2 2
3 3 In particular, this module exposes a set of top-level assert* functions that
4 4 can be used in place of nose.tools.assert* in method generators (the ones in
5 5 nose can not, at least as of nose 0.10.4).
6 6
7 7
8 8 Authors
9 9 -------
10 10 - Fernando Perez <Fernando.Perez@berkeley.edu>
11 11 """
12 12
13 13 from __future__ import absolute_import
14 14
15 15 #-----------------------------------------------------------------------------
16 16 # Copyright (C) 2009-2011 The IPython Development Team
17 17 #
18 18 # Distributed under the terms of the BSD License. The full license is in
19 19 # the file COPYING, distributed as part of this software.
20 20 #-----------------------------------------------------------------------------
21 21
22 22 #-----------------------------------------------------------------------------
23 23 # Imports
24 24 #-----------------------------------------------------------------------------
25 25
26 26 import os
27 27 import re
28 28 import sys
29 29 import tempfile
30 30
31 31 from contextlib import contextmanager
32 32 from io import StringIO
33 33
34 34 try:
35 35 # These tools are used by parts of the runtime, so we make the nose
36 36 # dependency optional at this point. Nose is a hard dependency to run the
37 37 # test suite, but NOT to use ipython itself.
38 38 import nose.tools as nt
39 39 has_nose = True
40 40 except ImportError:
41 41 has_nose = False
42 42
43 43 from IPython.config.loader import Config
44 44 from IPython.utils.process import find_cmd, getoutputerror
45 from IPython.utils.text import list_strings, getdefaultencoding
45 from IPython.utils.text import list_strings
46 46 from IPython.utils.io import temp_pyfile, Tee
47 47 from IPython.utils import py3compat
48 48
49 49 from . import decorators as dec
50 50 from . import skipdoctest
51 51
52 52 #-----------------------------------------------------------------------------
53 53 # Globals
54 54 #-----------------------------------------------------------------------------
55 55
56 56 # Make a bunch of nose.tools assert wrappers that can be used in test
57 57 # generators. This will expose an assert* function for each one in nose.tools.
58 58
59 59 _tpl = """
60 60 def %(name)s(*a,**kw):
61 61 return nt.%(name)s(*a,**kw)
62 62 """
63 63
64 64 if has_nose:
65 65 for _x in [a for a in dir(nt) if a.startswith('assert')]:
66 66 exec _tpl % dict(name=_x)
67 67
68 68 #-----------------------------------------------------------------------------
69 69 # Functions and classes
70 70 #-----------------------------------------------------------------------------
71 71
72 72 # The docstring for full_path doctests differently on win32 (different path
73 73 # separator) so just skip the doctest there. The example remains informative.
74 74 doctest_deco = skipdoctest.skip_doctest if sys.platform == 'win32' else dec.null_deco
75 75
76 76 @doctest_deco
77 77 def full_path(startPath,files):
78 78 """Make full paths for all the listed files, based on startPath.
79 79
80 80 Only the base part of startPath is kept, since this routine is typically
81 81 used with a script's __file__ variable as startPath. The base of startPath
82 82 is then prepended to all the listed files, forming the output list.
83 83
84 84 Parameters
85 85 ----------
86 86 startPath : string
87 87 Initial path to use as the base for the results. This path is split
88 88 using os.path.split() and only its first component is kept.
89 89
90 90 files : string or list
91 91 One or more files.
92 92
93 93 Examples
94 94 --------
95 95
96 96 >>> full_path('/foo/bar.py',['a.txt','b.txt'])
97 97 ['/foo/a.txt', '/foo/b.txt']
98 98
99 99 >>> full_path('/foo',['a.txt','b.txt'])
100 100 ['/a.txt', '/b.txt']
101 101
102 102 If a single file is given, the output is still a list:
103 103 >>> full_path('/foo','a.txt')
104 104 ['/a.txt']
105 105 """
106 106
107 107 files = list_strings(files)
108 108 base = os.path.split(startPath)[0]
109 109 return [ os.path.join(base,f) for f in files ]
110 110
111 111
112 112 def parse_test_output(txt):
113 113 """Parse the output of a test run and return errors, failures.
114 114
115 115 Parameters
116 116 ----------
117 117 txt : str
118 118 Text output of a test run, assumed to contain a line of one of the
119 119 following forms::
120 120 'FAILED (errors=1)'
121 121 'FAILED (failures=1)'
122 122 'FAILED (errors=1, failures=1)'
123 123
124 124 Returns
125 125 -------
126 126 nerr, nfail: number of errors and failures.
127 127 """
128 128
129 129 err_m = re.search(r'^FAILED \(errors=(\d+)\)', txt, re.MULTILINE)
130 130 if err_m:
131 131 nerr = int(err_m.group(1))
132 132 nfail = 0
133 133 return nerr, nfail
134 134
135 135 fail_m = re.search(r'^FAILED \(failures=(\d+)\)', txt, re.MULTILINE)
136 136 if fail_m:
137 137 nerr = 0
138 138 nfail = int(fail_m.group(1))
139 139 return nerr, nfail
140 140
141 141 both_m = re.search(r'^FAILED \(errors=(\d+), failures=(\d+)\)', txt,
142 142 re.MULTILINE)
143 143 if both_m:
144 144 nerr = int(both_m.group(1))
145 145 nfail = int(both_m.group(2))
146 146 return nerr, nfail
147 147
148 148 # If the input didn't match any of these forms, assume no error/failures
149 149 return 0, 0
150 150
151 151
152 152 # So nose doesn't think this is a test
153 153 parse_test_output.__test__ = False
154 154
155 155
156 156 def default_argv():
157 157 """Return a valid default argv for creating testing instances of ipython"""
158 158
159 159 return ['--quick', # so no config file is loaded
160 160 # Other defaults to minimize side effects on stdout
161 161 '--colors=NoColor', '--no-term-title','--no-banner',
162 162 '--autocall=0']
163 163
164 164
165 165 def default_config():
166 166 """Return a config object with good defaults for testing."""
167 167 config = Config()
168 168 config.TerminalInteractiveShell.colors = 'NoColor'
169 169 config.TerminalTerminalInteractiveShell.term_title = False,
170 170 config.TerminalInteractiveShell.autocall = 0
171 171 config.HistoryManager.hist_file = tempfile.mktemp(u'test_hist.sqlite')
172 172 config.HistoryManager.db_cache_size = 10000
173 173 return config
174 174
175 175
176 176 def ipexec(fname, options=None):
177 177 """Utility to call 'ipython filename'.
178 178
179 179 Starts IPython with a minimal and safe configuration to make startup as fast
180 180 as possible.
181 181
182 182 Note that this starts IPython in a subprocess!
183 183
184 184 Parameters
185 185 ----------
186 186 fname : str
187 187 Name of file to be executed (should have .py or .ipy extension).
188 188
189 189 options : optional, list
190 190 Extra command-line flags to be passed to IPython.
191 191
192 192 Returns
193 193 -------
194 194 (stdout, stderr) of ipython subprocess.
195 195 """
196 196 if options is None: options = []
197 197
198 198 # For these subprocess calls, eliminate all prompt printing so we only see
199 199 # output from script execution
200 200 prompt_opts = [ '--PromptManager.in_template=""',
201 201 '--PromptManager.in2_template=""',
202 202 '--PromptManager.out_template=""'
203 203 ]
204 204 cmdargs = ' '.join(default_argv() + prompt_opts + options)
205 205
206 206 _ip = get_ipython()
207 207 test_dir = os.path.dirname(__file__)
208 208
209 209 ipython_cmd = find_cmd('ipython3' if py3compat.PY3 else 'ipython')
210 210 # Absolute path for filename
211 211 full_fname = os.path.join(test_dir, fname)
212 212 full_cmd = '%s %s %s' % (ipython_cmd, cmdargs, full_fname)
213 213 #print >> sys.stderr, 'FULL CMD:', full_cmd # dbg
214 214 out, err = getoutputerror(full_cmd)
215 215 # `import readline` causes 'ESC[?1034h' to be output sometimes,
216 216 # so strip that out before doing comparisons
217 217 if out:
218 218 out = re.sub(r'\x1b\[[^h]+h', '', out)
219 219 return out, err
220 220
221 221
222 222 def ipexec_validate(fname, expected_out, expected_err='',
223 223 options=None):
224 224 """Utility to call 'ipython filename' and validate output/error.
225 225
226 226 This function raises an AssertionError if the validation fails.
227 227
228 228 Note that this starts IPython in a subprocess!
229 229
230 230 Parameters
231 231 ----------
232 232 fname : str
233 233 Name of the file to be executed (should have .py or .ipy extension).
234 234
235 235 expected_out : str
236 236 Expected stdout of the process.
237 237
238 238 expected_err : optional, str
239 239 Expected stderr of the process.
240 240
241 241 options : optional, list
242 242 Extra command-line flags to be passed to IPython.
243 243
244 244 Returns
245 245 -------
246 246 None
247 247 """
248 248
249 249 import nose.tools as nt
250 250
251 251 out, err = ipexec(fname, options)
252 252 #print 'OUT', out # dbg
253 253 #print 'ERR', err # dbg
254 254 # If there are any errors, we must check those before stdout, as they may be
255 255 # more informative than simply having an empty stdout.
256 256 if err:
257 257 if expected_err:
258 258 nt.assert_equals(err.strip(), expected_err.strip())
259 259 else:
260 260 raise ValueError('Running file %r produced error: %r' %
261 261 (fname, err))
262 262 # If no errors or output on stderr was expected, match stdout
263 263 nt.assert_equals(out.strip(), expected_out.strip())
264 264
265 265
266 266 class TempFileMixin(object):
267 267 """Utility class to create temporary Python/IPython files.
268 268
269 269 Meant as a mixin class for test cases."""
270 270
271 271 def mktmp(self, src, ext='.py'):
272 272 """Make a valid python temp file."""
273 273 fname, f = temp_pyfile(src, ext)
274 274 self.tmpfile = f
275 275 self.fname = fname
276 276
277 277 def tearDown(self):
278 278 if hasattr(self, 'tmpfile'):
279 279 # If the tmpfile wasn't made because of skipped tests, like in
280 280 # win32, there's nothing to cleanup.
281 281 self.tmpfile.close()
282 282 try:
283 283 os.unlink(self.fname)
284 284 except:
285 285 # On Windows, even though we close the file, we still can't
286 286 # delete it. I have no clue why
287 287 pass
288 288
289 289 pair_fail_msg = ("Testing {0}\n\n"
290 290 "In:\n"
291 291 " {1!r}\n"
292 292 "Expected:\n"
293 293 " {2!r}\n"
294 294 "Got:\n"
295 295 " {3!r}\n")
296 296 def check_pairs(func, pairs):
297 297 """Utility function for the common case of checking a function with a
298 298 sequence of input/output pairs.
299 299
300 300 Parameters
301 301 ----------
302 302 func : callable
303 303 The function to be tested. Should accept a single argument.
304 304 pairs : iterable
305 305 A list of (input, expected_output) tuples.
306 306
307 307 Returns
308 308 -------
309 309 None. Raises an AssertionError if any output does not match the expected
310 310 value.
311 311 """
312 312 name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>"))
313 313 for inp, expected in pairs:
314 314 out = func(inp)
315 315 assert out == expected, pair_fail_msg.format(name, inp, expected, out)
316 316
317 317
318 318 if py3compat.PY3:
319 319 MyStringIO = StringIO
320 320 else:
321 321 # In Python 2, stdout/stderr can have either bytes or unicode written to them,
322 322 # so we need a class that can handle both.
323 323 class MyStringIO(StringIO):
324 324 def write(self, s):
325 s = py3compat.cast_unicode(s, encoding=getdefaultencoding())
325 s = py3compat.cast_unicode(s, encoding=py3compat.getdefaultencoding())
326 326 super(MyStringIO, self).write(s)
327 327
328 328 notprinted_msg = """Did not find {0!r} in printed output (on {1}):
329 329 {2!r}"""
330 330
331 331 class AssertPrints(object):
332 332 """Context manager for testing that code prints certain text.
333 333
334 334 Examples
335 335 --------
336 336 >>> with AssertPrints("abc", suppress=False):
337 337 ... print "abcd"
338 338 ... print "def"
339 339 ...
340 340 abcd
341 341 def
342 342 """
343 343 def __init__(self, s, channel='stdout', suppress=True):
344 344 self.s = s
345 345 self.channel = channel
346 346 self.suppress = suppress
347 347
348 348 def __enter__(self):
349 349 self.orig_stream = getattr(sys, self.channel)
350 350 self.buffer = MyStringIO()
351 351 self.tee = Tee(self.buffer, channel=self.channel)
352 352 setattr(sys, self.channel, self.buffer if self.suppress else self.tee)
353 353
354 354 def __exit__(self, etype, value, traceback):
355 355 self.tee.flush()
356 356 setattr(sys, self.channel, self.orig_stream)
357 357 printed = self.buffer.getvalue()
358 358 assert self.s in printed, notprinted_msg.format(self.s, self.channel, printed)
359 359 return False
360 360
361 361 class AssertNotPrints(AssertPrints):
362 362 """Context manager for checking that certain output *isn't* produced.
363 363
364 364 Counterpart of AssertPrints"""
365 365 def __exit__(self, etype, value, traceback):
366 366 self.tee.flush()
367 367 setattr(sys, self.channel, self.orig_stream)
368 368 printed = self.buffer.getvalue()
369 369 assert self.s not in printed, notprinted_msg.format(self.s, self.channel, printed)
370 370 return False
371 371
372 372 @contextmanager
373 373 def mute_warn():
374 374 from IPython.utils import warn
375 375 save_warn = warn.warn
376 376 warn.warn = lambda *a, **kw: None
377 377 try:
378 378 yield
379 379 finally:
380 380 warn.warn = save_warn
381 381
382 382 @contextmanager
383 383 def make_tempfile(name):
384 384 """ Create an empty, named, temporary file for the duration of the context.
385 385 """
386 386 f = open(name, 'w')
387 387 f.close()
388 388 try:
389 389 yield
390 390 finally:
391 391 os.unlink(name)
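
In the testing tools, `MyStringIO` exists so that Python 2 code can write either bytes or unicode to a captured stream, and the cast to unicode now uses the relocated helper. A rough usage sketch on Python 2, following the `AssertPrints` docstring above (the module path `IPython.testing.tools` is assumed)::

    from IPython.testing.tools import AssertPrints

    # AssertPrints swaps sys.stdout for a MyStringIO-backed buffer, so byte
    # strings and unicode can both be printed while the context is active.
    with AssertPrints("abc"):
        print "abc as bytes"
        print u"and some unicode"
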
@@ -1,197 +1,197 b''
1 1 """Posix-specific implementation of process utilities.
2 2
3 3 This file is only meant to be imported by process.py, not by end-users.
4 4 """
5 5
6 6 #-----------------------------------------------------------------------------
7 7 # Copyright (C) 2010-2011 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 from __future__ import print_function
17 17
18 18 # Stdlib
19 19 import subprocess as sp
20 20 import sys
21 21
22 22 from IPython.external import pexpect
23 23
24 24 # Our own
25 25 from .autoattr import auto_attr
26 26 from ._process_common import getoutput, arg_split
27 27 from IPython.utils import text
28 28 from IPython.utils import py3compat
29 29
30 30 #-----------------------------------------------------------------------------
31 31 # Function definitions
32 32 #-----------------------------------------------------------------------------
33 33
34 34 def _find_cmd(cmd):
35 35 """Find the full path to a command using which."""
36 36
37 37 path = sp.Popen(['/usr/bin/env', 'which', cmd],
38 38 stdout=sp.PIPE).communicate()[0]
39 39 return py3compat.bytes_to_str(path)
40 40
41 41
42 42 class ProcessHandler(object):
43 43 """Execute subprocesses under the control of pexpect.
44 44 """
45 45 # Timeout in seconds to wait on each reading of the subprocess' output.
46 46 # This should not be set too low to avoid cpu overusage from our side,
47 47 # since we read in a loop whose period is controlled by this timeout.
48 48 read_timeout = 0.05
49 49
50 50 # Timeout to give a process if we receive SIGINT, between sending the
51 51 # SIGINT to the process and forcefully terminating it.
52 52 terminate_timeout = 0.2
53 53
54 54 # File object where stdout and stderr of the subprocess will be written
55 55 logfile = None
56 56
57 57 # Shell to call for subprocesses to execute
58 58 sh = None
59 59
60 60 @auto_attr
61 61 def sh(self):
62 62 sh = pexpect.which('sh')
63 63 if sh is None:
64 64 raise OSError('"sh" shell not found')
65 65 return sh
66 66
67 67 def __init__(self, logfile=None, read_timeout=None, terminate_timeout=None):
68 68 """Arguments are used for pexpect calls."""
69 69 self.read_timeout = (ProcessHandler.read_timeout if read_timeout is
70 70 None else read_timeout)
71 71 self.terminate_timeout = (ProcessHandler.terminate_timeout if
72 72 terminate_timeout is None else
73 73 terminate_timeout)
74 74 self.logfile = sys.stdout if logfile is None else logfile
75 75
76 76 def getoutput(self, cmd):
77 77 """Run a command and return its stdout/stderr as a string.
78 78
79 79 Parameters
80 80 ----------
81 81 cmd : str
82 82 A command to be executed in the system shell.
83 83
84 84 Returns
85 85 -------
86 86 output : str
87 87 A string containing the combination of stdout and stderr from the
88 88 subprocess, in whatever order the subprocess originally wrote to its
89 89 file descriptors (so the order of the information in this string is the
90 90 correct order as would be seen if running the command in a terminal).
91 91 """
92 92 try:
93 93 return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
94 94 except KeyboardInterrupt:
95 95 print('^C', file=sys.stderr, end='')
96 96
97 97 def getoutput_pexpect(self, cmd):
98 98 """Run a command and return its stdout/stderr as a string.
99 99
100 100 Parameters
101 101 ----------
102 102 cmd : str
103 103 A command to be executed in the system shell.
104 104
105 105 Returns
106 106 -------
107 107 output : str
108 108 A string containing the combination of stdout and stderr from the
109 109 subprocess, in whatever order the subprocess originally wrote to its
110 110 file descriptors (so the order of the information in this string is the
111 111 correct order as would be seen if running the command in a terminal).
112 112 """
113 113 try:
114 114 return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
115 115 except KeyboardInterrupt:
116 116 print('^C', file=sys.stderr, end='')
117 117
118 118 def system(self, cmd):
119 119 """Execute a command in a subshell.
120 120
121 121 Parameters
122 122 ----------
123 123 cmd : str
124 124 A command to be executed in the system shell.
125 125
126 126 Returns
127 127 -------
128 128 int : child's exitstatus
129 129 """
130 130 # Get likely encoding for the output.
131 enc = text.getdefaultencoding()
131 enc = py3compat.getdefaultencoding()
132 132
133 133 # Patterns to match on the output, for pexpect. We read input and
134 134 # allow either a short timeout or EOF
135 135 patterns = [pexpect.TIMEOUT, pexpect.EOF]
136 136 # the index of the EOF pattern in the list.
137 137 # even though we know it's 1, this call means we don't have to worry if
138 138 # we change the above list, and forget to change this value:
139 139 EOF_index = patterns.index(pexpect.EOF)
140 140 # The size of the output stored so far in the process output buffer.
141 141 # Since pexpect only appends to this buffer, each time we print we
142 142 # record how far we've printed, so that next time we only print *new*
143 143 # content from the buffer.
144 144 out_size = 0
145 145 try:
146 146 # Since we're not really searching the buffer for text patterns, we
147 147 # can set pexpect's search window to be tiny and it won't matter.
148 148 # We only search for the 'patterns' timeout or EOF, which aren't in
149 149 # the text itself.
150 150 #child = pexpect.spawn(pcmd, searchwindowsize=1)
151 151 if hasattr(pexpect, 'spawnb'):
152 152 child = pexpect.spawnb(self.sh, args=['-c', cmd]) # Pexpect-U
153 153 else:
154 154 child = pexpect.spawn(self.sh, args=['-c', cmd]) # Vanilla Pexpect
155 155 flush = sys.stdout.flush
156 156 while True:
157 157 # res is the index of the pattern that caused the match, so we
158 158 # know whether we've finished (if we matched EOF) or not
159 159 res_idx = child.expect_list(patterns, self.read_timeout)
160 160 print(child.before[out_size:].decode(enc, 'replace'), end='')
161 161 flush()
162 162 if res_idx==EOF_index:
163 163 break
164 164 # Update the pointer to what we've already printed
165 165 out_size = len(child.before)
166 166 except KeyboardInterrupt:
167 167 # We need to send ^C to the process. The ascii code for '^C' is 3
168 168 # (the character is known as ETX for 'End of Text', see
169 169 # curses.ascii.ETX).
170 170 child.sendline(chr(3))
171 171 # Read and print any more output the program might produce on its
172 172 # way out.
173 173 try:
174 174 out_size = len(child.before)
175 175 child.expect_list(patterns, self.terminate_timeout)
176 176 print(child.before[out_size:].decode(enc, 'replace'), end='')
177 177 sys.stdout.flush()
178 178 except KeyboardInterrupt:
179 179 # Impatient users tend to type it multiple times
180 180 pass
181 181 finally:
182 182 # Ensure the subprocess really is terminated
183 183 child.terminate(force=True)
184 184 # add isalive check, to ensure exitstatus is set:
185 185 child.isalive()
186 186 return child.exitstatus
187 187
188 188
189 189 # Make system() available with a functional interface for outside use. Note that
190 190 # we use getoutput() from the _common utils, which is built on top of popen().
191 191 # Using pexpect to get subprocess output is avoided because programs think they
192 192 # are talking to a tty and produce highly formatted output (ls is a good
193 193 # example), which makes the result hard to parse.
194 194 system = ProcessHandler().system
195 195
196 196
197 197
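
For reference, a minimal usage sketch of the pexpect-backed handler above (assumes a POSIX system with pexpect installed; `system` is the module-level alias created on the last line, and the import path assumes the public IPython.utils.process wrapper re-exports it):

    from IPython.utils.process import system

    # Output is streamed live to the terminal as the child produces it;
    # the return value is the child's exit status (0 on success).
    status = system('echo hello; sleep 1; echo done')
    print(status)
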
@@ -1,184 +1,184 b''
1 1 """Windows-specific implementation of process utilities.
2 2
3 3 This file is only meant to be imported by process.py, not by end-users.
4 4 """
5 5
6 6 #-----------------------------------------------------------------------------
7 7 # Copyright (C) 2010-2011 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16 from __future__ import print_function
17 17
18 18 # stdlib
19 19 import os
20 20 import sys
21 21 import ctypes
22 22 import msvcrt
23 23
24 24 from ctypes import c_int, POINTER
25 25 from ctypes.wintypes import LPCWSTR, HLOCAL
26 26 from subprocess import STDOUT
27 27
28 28 # our own imports
29 29 from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split
30 30 from . import py3compat
31 31 from . import text
32 32
33 33 #-----------------------------------------------------------------------------
34 34 # Function definitions
35 35 #-----------------------------------------------------------------------------
36 36
37 37 class AvoidUNCPath(object):
38 38 """A context manager to protect command execution from UNC paths.
39 39
40 40 In the Win32 API, commands can't be invoked with the cwd being a UNC path.
41 41 This context manager temporarily changes directory to the 'C:' drive on
42 42 entering, and restores the original working directory on exit.
43 43
44 44 The context manager returns the starting working directory *if* it made a
45 45 change and None otherwise, so that users can apply the necessary adjustment
46 46 to their system calls in the event of a change.
47 47
48 48 Example
49 49 -------
50 50 ::
51 51 cmd = 'dir'
52 52 with AvoidUNCPath() as path:
53 53 if path is not None:
54 54 cmd = '"pushd %s &&"%s' % (path, cmd)
55 55 os.system(cmd)
56 56 """
57 57 def __enter__(self):
58 58 self.path = os.getcwdu()
59 59 self.is_unc_path = self.path.startswith(r"\\")
60 60 if self.is_unc_path:
61 61 # change to c drive (as cmd.exe cannot handle UNC addresses)
62 62 os.chdir("C:")
63 63 return self.path
64 64 else:
65 65 # We return None to signal that there was no change in the working
66 66 # directory
67 67 return None
68 68
69 69 def __exit__(self, exc_type, exc_value, traceback):
70 70 if self.is_unc_path:
71 71 os.chdir(self.path)
72 72
73 73
74 74 def _find_cmd(cmd):
75 75 """Find the full path to a .bat or .exe using the win32api module."""
76 76 try:
77 77 from win32api import SearchPath
78 78 except ImportError:
79 79 raise ImportError('you need to have pywin32 installed for this to work')
80 80 else:
81 81 PATH = os.environ['PATH']
82 82 extensions = ['.exe', '.com', '.bat', '.py']
83 83 path = None
84 84 for ext in extensions:
85 85 try:
86 86 path = SearchPath(PATH, cmd + ext)[0]
87 87 except:
88 88 pass
89 89 if path is None:
90 90 raise OSError("command %r not found" % cmd)
91 91 else:
92 92 return path
93 93
94 94
95 95 def _system_body(p):
96 96 """Callback for _system."""
97 enc = text.getdefaultencoding()
97 enc = py3compat.getdefaultencoding()
98 98 for line in read_no_interrupt(p.stdout).splitlines():
99 99 line = line.decode(enc, 'replace')
100 100 print(line, file=sys.stdout)
101 101 for line in read_no_interrupt(p.stderr).splitlines():
102 102 line = line.decode(enc, 'replace')
103 103 print(line, file=sys.stderr)
104 104
105 105 # Wait to finish for returncode
106 106 return p.wait()
107 107
108 108
109 109 def system(cmd):
110 110 """Win32 version of os.system() that works with network shares.
111 111
112 112 Note that this implementation returns None, as meant for use in IPython.
113 113
114 114 Parameters
115 115 ----------
116 116 cmd : str
117 117 A command to be executed in the system shell.
118 118
119 119 Returns
120 120 -------
121 121 None : we explicitly do NOT return the subprocess status code, as this
122 122 utility is meant to be used extensively in IPython, where any return value
123 123 would trigger :func:`sys.displayhook` calls.
124 124 """
125 125 # The controller provides interactivity with both
126 126 # stdin and stdout
127 127 import _process_win32_controller
128 128 _process_win32_controller.system(cmd)
129 129
130 130
131 131 def getoutput(cmd):
132 132 """Return standard output of executing cmd in a shell.
133 133
134 134 Accepts the same arguments as os.system().
135 135
136 136 Parameters
137 137 ----------
138 138 cmd : str
139 139 A command to be executed in the system shell.
140 140
141 141 Returns
142 142 -------
143 143 stdout : str
144 144 """
145 145
146 146 with AvoidUNCPath() as path:
147 147 if path is not None:
148 148 cmd = '"pushd %s &&"%s' % (path, cmd)
149 149 out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)
150 150
151 151 if out is None:
152 152 out = ''
153 153 return out
154 154
155 155 try:
156 156 CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
157 157 CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]
158 158 CommandLineToArgvW.res_types = [POINTER(LPCWSTR)]
159 159 LocalFree = ctypes.windll.kernel32.LocalFree
160 160 LocalFree.res_type = HLOCAL
161 161 LocalFree.arg_types = [HLOCAL]
162 162
163 163 def arg_split(commandline, posix=False, strict=True):
164 164 """Split a command line's arguments in a shell-like manner.
165 165
166 166         This is a special version for Windows that uses a ctypes call to CommandLineToArgvW
167 167         to do the argv splitting. The posix parameter is ignored.
168 168
169 169 If strict=False, process_common.arg_split(...strict=False) is used instead.
170 170 """
171 171 #CommandLineToArgvW returns path to executable if called with empty string.
172 172 if commandline.strip() == "":
173 173 return []
174 174 if not strict:
175 175 # not really a cl-arg, fallback on _process_common
176 176 return py_arg_split(commandline, posix=posix, strict=strict)
177 177 argvn = c_int()
178 178 result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))
179 179 result_array_type = LPCWSTR * argvn.value
180 180 result = [arg for arg in result_array_type.from_address(result_pointer)]
181 181 retval = LocalFree(result_pointer)
182 182 return result
183 183 except AttributeError:
184 184 arg_split = py_arg_split
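
As an illustration of the ctypes-based splitter above, a small behaviour sketch (values are hypothetical and assume a Windows build where ctypes.windll is available; elsewhere the AttributeError branch falls back to the plain-Python splitter):

    arg_split('dir "C:\\Program Files"')   # -> [u'dir', u'C:\\Program Files']
    arg_split('')                          # -> [] (empty command line)
    arg_split('a b c', strict=False)       # delegates to _process_common.arg_split
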
@@ -1,322 +1,323 b''
1 1 # encoding: utf-8
2 2 """
3 3 IO related utilities.
4 4 """
5 5
6 6 #-----------------------------------------------------------------------------
7 7 # Copyright (C) 2008-2011 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-----------------------------------------------------------------------------
12 12 from __future__ import print_function
13 13
14 14 #-----------------------------------------------------------------------------
15 15 # Imports
16 16 #-----------------------------------------------------------------------------
17 import os
17 18 import sys
18 19 import tempfile
19 20
20 21 #-----------------------------------------------------------------------------
21 22 # Code
22 23 #-----------------------------------------------------------------------------
23 24
24 25
25 26 class IOStream:
26 27
27 28 def __init__(self,stream, fallback=None):
28 29 if not hasattr(stream,'write') or not hasattr(stream,'flush'):
29 30 if fallback is not None:
30 31 stream = fallback
31 32 else:
32 33 raise ValueError("fallback required, but not specified")
33 34 self.stream = stream
34 35 self._swrite = stream.write
35 36
36 37 # clone all methods not overridden:
37 38 def clone(meth):
38 39 return not hasattr(self, meth) and not meth.startswith('_')
39 40 for meth in filter(clone, dir(stream)):
40 41 setattr(self, meth, getattr(stream, meth))
41 42
42 43 def write(self,data):
43 44 try:
44 45 self._swrite(data)
45 46 except:
46 47 try:
47 48 # print handles some unicode issues which may trip a plain
48 49 # write() call. Emulate write() by using an empty end
49 50 # argument.
50 51 print(data, end='', file=self.stream)
51 52 except:
52 53 # if we get here, something is seriously broken.
53 54 print('ERROR - failed to write data to stream:', self.stream,
54 55 file=sys.stderr)
55 56
56 57 def writelines(self, lines):
57 58 if isinstance(lines, basestring):
58 59 lines = [lines]
59 60 for line in lines:
60 61 self.write(line)
61 62
62 63 # This class used to have a writeln method, but regular files and streams
63 64 # in Python don't have this method. We need to keep this completely
64 65 # compatible so we removed it.
65 66
66 67 @property
67 68 def closed(self):
68 69 return self.stream.closed
69 70
70 71 def close(self):
71 72 pass
72 73
73 74 # setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr
74 75 devnull = open(os.devnull, 'a')
75 76 stdin = IOStream(sys.stdin, fallback=devnull)
76 77 stdout = IOStream(sys.stdout, fallback=devnull)
77 78 stderr = IOStream(sys.stderr, fallback=devnull)
78 79
79 80 class IOTerm:
80 81 """ Term holds the file or file-like objects for handling I/O operations.
81 82
82 83 These are normally just sys.stdin, sys.stdout and sys.stderr but for
83 84     Windows they can be replaced to allow editing the strings before they are
84 85 displayed."""
85 86
86 87 # In the future, having IPython channel all its I/O operations through
87 88 # this class will make it easier to embed it into other environments which
88 89 # are not a normal terminal (such as a GUI-based shell)
89 90 def __init__(self, stdin=None, stdout=None, stderr=None):
90 91 mymodule = sys.modules[__name__]
91 92 self.stdin = IOStream(stdin, mymodule.stdin)
92 93 self.stdout = IOStream(stdout, mymodule.stdout)
93 94 self.stderr = IOStream(stderr, mymodule.stderr)
94 95
95 96
96 97 class Tee(object):
97 98 """A class to duplicate an output stream to stdout/err.
98 99
99 100 This works in a manner very similar to the Unix 'tee' command.
100 101
101 102 When the object is closed or deleted, it closes the original file given to
102 103 it for duplication.
103 104 """
104 105 # Inspired by:
105 106 # http://mail.python.org/pipermail/python-list/2007-May/442737.html
106 107
107 108 def __init__(self, file_or_name, mode="w", channel='stdout'):
108 109 """Construct a new Tee object.
109 110
110 111 Parameters
111 112 ----------
112 113 file_or_name : filename or open filehandle (writable)
113 114 File that will be duplicated
114 115
115 116 mode : optional, valid mode for open().
116 117           If a filename was given, open with this mode.
117 118
118 119 channel : str, one of ['stdout', 'stderr']
119 120 """
120 121 if channel not in ['stdout', 'stderr']:
121 122 raise ValueError('Invalid channel spec %s' % channel)
122 123
123 124 if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'):
124 125 self.file = file_or_name
125 126 else:
126 127 self.file = open(file_or_name, mode)
127 128 self.channel = channel
128 129 self.ostream = getattr(sys, channel)
129 130 setattr(sys, channel, self)
130 131 self._closed = False
131 132
132 133 def close(self):
133 134 """Close the file and restore the channel."""
134 135 self.flush()
135 136 setattr(sys, self.channel, self.ostream)
136 137 self.file.close()
137 138 self._closed = True
138 139
139 140 def write(self, data):
140 141 """Write data to both channels."""
141 142 self.file.write(data)
142 143 self.ostream.write(data)
143 144 self.ostream.flush()
144 145
145 146 def flush(self):
146 147 """Flush both channels."""
147 148 self.file.flush()
148 149 self.ostream.flush()
149 150
150 151 def __del__(self):
151 152 if not self._closed:
152 153 self.close()
153 154
154 155
155 156 def file_read(filename):
156 157 """Read a file and close it. Returns the file source."""
157 158 fobj = open(filename,'r');
158 159 source = fobj.read();
159 160 fobj.close()
160 161 return source
161 162
162 163
163 164 def file_readlines(filename):
164 165 """Read a file and close it. Returns the file source using readlines()."""
165 166 fobj = open(filename,'r');
166 167 lines = fobj.readlines();
167 168 fobj.close()
168 169 return lines
169 170
170 171
171 172 def raw_input_multi(header='', ps1='==> ', ps2='..> ',terminate_str = '.'):
172 173 """Take multiple lines of input.
173 174
174 175 A list with each line of input as a separate element is returned when a
175 176 termination string is entered (defaults to a single '.'). Input can also
176 177 terminate via EOF (^D in Unix, ^Z-RET in Windows).
177 178
178 179 Lines of input which end in \\ are joined into single entries (and a
179 180 secondary continuation prompt is issued as long as the user terminates
180 181 lines with \\). This allows entering very long strings which are still
181 182 meant to be treated as single entities.
182 183 """
183 184
184 185 try:
185 186 if header:
186 187 header += '\n'
187 188 lines = [raw_input(header + ps1)]
188 189 except EOFError:
189 190 return []
190 191 terminate = [terminate_str]
191 192 try:
192 193 while lines[-1:] != terminate:
193 194 new_line = raw_input(ps1)
194 195 while new_line.endswith('\\'):
195 196 new_line = new_line[:-1] + raw_input(ps2)
196 197 lines.append(new_line)
197 198
198 199 return lines[:-1] # don't return the termination command
199 200 except EOFError:
200 201 print()
201 202 return lines
202 203
203 204
204 205 def raw_input_ext(prompt='', ps2='... '):
205 206 """Similar to raw_input(), but accepts extended lines if input ends with \\."""
206 207
207 208 line = raw_input(prompt)
208 209 while line.endswith('\\'):
209 210 line = line[:-1] + raw_input(ps2)
210 211 return line
211 212
212 213
213 214 def ask_yes_no(prompt,default=None):
214 215 """Asks a question and returns a boolean (y/n) answer.
215 216
216 217 If default is given (one of 'y','n'), it is used if the user input is
217 218 empty. Otherwise the question is repeated until an answer is given.
218 219
219 220 An EOF is treated as the default answer. If there is no default, an
220 221 exception is raised to prevent infinite loops.
221 222
222 223 Valid answers are: y/yes/n/no (match is not case sensitive)."""
223 224
224 225 answers = {'y':True,'n':False,'yes':True,'no':False}
225 226 ans = None
226 227 while ans not in answers.keys():
227 228 try:
228 229 ans = raw_input(prompt+' ').lower()
229 230 if not ans: # response was an empty string
230 231 ans = default
231 232 except KeyboardInterrupt:
232 233 pass
233 234 except EOFError:
234 235 if default in answers.keys():
235 236 ans = default
236 237 print()
237 238 else:
238 239 raise
239 240
240 241 return answers[ans]
241 242
242 243
243 244 class NLprinter:
244 245 """Print an arbitrarily nested list, indicating index numbers.
245 246
246 247 An instance of this class called nlprint is available and callable as a
247 248 function.
248 249
249 250 nlprint(list,indent=' ',sep=': ') -> prints indenting each level by 'indent'
250 251 and using 'sep' to separate the index from the value. """
251 252
252 253 def __init__(self):
253 254 self.depth = 0
254 255
255 256 def __call__(self,lst,pos='',**kw):
256 257 """Prints the nested list numbering levels."""
257 258 kw.setdefault('indent',' ')
258 259 kw.setdefault('sep',': ')
259 260 kw.setdefault('start',0)
260 261 kw.setdefault('stop',len(lst))
261 262 # we need to remove start and stop from kw so they don't propagate
262 263 # into a recursive call for a nested list.
263 264 start = kw['start']; del kw['start']
264 265 stop = kw['stop']; del kw['stop']
265 266 if self.depth == 0 and 'header' in kw.keys():
266 267 print(kw['header'])
267 268
268 269 for idx in range(start,stop):
269 270 elem = lst[idx]
270 271 newpos = pos + str(idx)
271 272 if type(elem)==type([]):
272 273 self.depth += 1
273 274 self.__call__(elem, newpos+",", **kw)
274 275 self.depth -= 1
275 276 else:
276 277 print(kw['indent']*self.depth + newpos + kw["sep"] + repr(elem))
277 278
278 279 nlprint = NLprinter()
279 280
280 281
281 282 def temp_pyfile(src, ext='.py'):
282 283 """Make a temporary python file, return filename and filehandle.
283 284
284 285 Parameters
285 286 ----------
286 287 src : string or list of strings (no need for ending newlines if list)
287 288 Source code to be written to the file.
288 289
289 290 ext : optional, string
290 291 Extension for the generated file.
291 292
292 293 Returns
293 294 -------
294 295 (filename, open filehandle)
295 296 It is the caller's responsibility to close the open file and unlink it.
296 297 """
297 298 fname = tempfile.mkstemp(ext)[1]
298 299 f = open(fname,'w')
299 300 f.write(src)
300 301 f.flush()
301 302 return fname, f
302 303
303 304
304 305 def raw_print(*args, **kw):
305 306 """Raw print to sys.__stdout__, otherwise identical interface to print()."""
306 307
307 308 print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
308 309 file=sys.__stdout__)
309 310 sys.__stdout__.flush()
310 311
311 312
312 313 def raw_print_err(*args, **kw):
313 314 """Raw print to sys.__stderr__, otherwise identical interface to print()."""
314 315
315 316 print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
316 317 file=sys.__stderr__)
317 318 sys.__stderr__.flush()
318 319
319 320
320 321 # Short aliases for quick debugging, do NOT use these in production code.
321 322 rprint = raw_print
322 323 rprinte = raw_print_err
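
As an illustration, a minimal sketch of the Tee class above mirroring stdout into a log file (the file name is arbitrary; the import path assumes this module is IPython.utils.io):

    from IPython.utils.io import Tee

    tee = Tee('session.log', mode='w', channel='stdout')
    print('this line goes to both the terminal and session.log')
    tee.close()   # restores sys.stdout and closes the log file
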
@@ -1,165 +1,165 b''
1 1 """Utilities to manipulate JSON objects.
2 2 """
3 3 #-----------------------------------------------------------------------------
4 4 # Copyright (C) 2010-2011 The IPython Development Team
5 5 #
6 6 # Distributed under the terms of the BSD License. The full license is in
7 7 # the file COPYING.txt, distributed as part of this software.
8 8 #-----------------------------------------------------------------------------
9 9
10 10 #-----------------------------------------------------------------------------
11 11 # Imports
12 12 #-----------------------------------------------------------------------------
13 13 # stdlib
14 14 import re
15 15 import sys
16 16 import types
17 17 from datetime import datetime
18 18
19 19 from IPython.utils import py3compat
20 20 from IPython.utils import text
21 21 next_attr_name = '__next__' if py3compat.PY3 else 'next'
22 22
23 23 #-----------------------------------------------------------------------------
24 24 # Globals and constants
25 25 #-----------------------------------------------------------------------------
26 26
27 27 # timestamp formats
28 28 ISO8601="%Y-%m-%dT%H:%M:%S.%f"
29 29 ISO8601_PAT=re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+$")
30 30
31 31 #-----------------------------------------------------------------------------
32 32 # Classes and functions
33 33 #-----------------------------------------------------------------------------
34 34
35 35 def rekey(dikt):
36 36 """Rekey a dict that has been forced to use str keys where there should be
37 37 ints by json."""
38 38 for k in dikt.iterkeys():
39 39 if isinstance(k, basestring):
40 40 ik=fk=None
41 41 try:
42 42 ik = int(k)
43 43 except ValueError:
44 44 try:
45 45 fk = float(k)
46 46 except ValueError:
47 47 continue
48 48 if ik is not None:
49 49 nk = ik
50 50 else:
51 51 nk = fk
52 52 if nk in dikt:
53 53 raise KeyError("already have key %r"%nk)
54 54 dikt[nk] = dikt.pop(k)
55 55 return dikt
56 56
57 57
58 58 def extract_dates(obj):
59 59 """extract ISO8601 dates from unpacked JSON"""
60 60 if isinstance(obj, dict):
61 61 obj = dict(obj) # don't clobber
62 62 for k,v in obj.iteritems():
63 63 obj[k] = extract_dates(v)
64 64 elif isinstance(obj, (list, tuple)):
65 65 obj = [ extract_dates(o) for o in obj ]
66 66 elif isinstance(obj, basestring):
67 67 if ISO8601_PAT.match(obj):
68 68 obj = datetime.strptime(obj, ISO8601)
69 69 return obj
70 70
71 71 def squash_dates(obj):
72 72 """squash datetime objects into ISO8601 strings"""
73 73 if isinstance(obj, dict):
74 74 obj = dict(obj) # don't clobber
75 75 for k,v in obj.iteritems():
76 76 obj[k] = squash_dates(v)
77 77 elif isinstance(obj, (list, tuple)):
78 78 obj = [ squash_dates(o) for o in obj ]
79 79 elif isinstance(obj, datetime):
80 80 obj = obj.strftime(ISO8601)
81 81 return obj
82 82
83 83 def date_default(obj):
84 84 """default function for packing datetime objects in JSON."""
85 85 if isinstance(obj, datetime):
86 86 return obj.strftime(ISO8601)
87 87 else:
88 88 raise TypeError("%r is not JSON serializable"%obj)
89 89
90 90
91 91
92 92 def json_clean(obj):
93 93 """Clean an object to ensure it's safe to encode in JSON.
94 94
95 95 Atomic, immutable objects are returned unmodified. Sets and tuples are
96 96 converted to lists, lists are copied and dicts are also copied.
97 97
98 98 Note: dicts whose keys could cause collisions upon encoding (such as a dict
99 99 with both the number 1 and the string '1' as keys) will cause a ValueError
100 100 to be raised.
101 101
102 102 Parameters
103 103 ----------
104 104 obj : any python object
105 105
106 106 Returns
107 107 -------
108 108 out : object
109 109
110 110 A version of the input which will not cause an encoding error when
111 111 encoded as JSON. Note that this function does not *encode* its inputs,
112 112         it simply sanitizes them so that there will be no encoding errors later.
113 113
114 114 Examples
115 115 --------
116 116 >>> json_clean(4)
117 117 4
118 118 >>> json_clean(range(10))
119 119 [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
120 120 >>> json_clean(dict(x=1, y=2))
121 121 {'y': 2, 'x': 1}
122 122 >>> json_clean(dict(x=1, y=2, z=[1,2,3]))
123 123 {'y': 2, 'x': 1, 'z': [1, 2, 3]}
124 124 >>> json_clean(True)
125 125 True
126 126 """
127 127 # types that are 'atomic' and ok in json as-is. bool doesn't need to be
128 128 # listed explicitly because bools pass as int instances
129 129 atomic_ok = (unicode, int, float, types.NoneType)
130 130
131 131 # containers that we need to convert into lists
132 132 container_to_list = (tuple, set, types.GeneratorType)
133 133
134 134 if isinstance(obj, atomic_ok):
135 135 return obj
136 136
137 137 if isinstance(obj, bytes):
138 return obj.decode(text.getdefaultencoding(), 'replace')
138 return obj.decode(py3compat.getdefaultencoding(), 'replace')
139 139
140 140 if isinstance(obj, container_to_list) or (
141 141 hasattr(obj, '__iter__') and hasattr(obj, next_attr_name)):
142 142 obj = list(obj)
143 143
144 144 if isinstance(obj, list):
145 145 return [json_clean(x) for x in obj]
146 146
147 147 if isinstance(obj, dict):
148 148 # First, validate that the dict won't lose data in conversion due to
149 149 # key collisions after stringification. This can happen with keys like
150 150 # True and 'true' or 1 and '1', which collide in JSON.
151 151 nkeys = len(obj)
152 152 nkeys_collapsed = len(set(map(str, obj)))
153 153 if nkeys != nkeys_collapsed:
154 154 raise ValueError('dict can not be safely converted to JSON: '
155 155 'key collision would lead to dropped values')
156 156 # If all OK, proceed by making the new dict that will be json-safe
157 157 out = {}
158 158 for k,v in obj.iteritems():
159 159 out[str(k)] = json_clean(v)
160 160 return out
161 161
162 162 # If we get here, we don't know how to handle the object, so we just get
163 163 # its repr and return that. This will catch lambdas, open sockets, class
164 164 # objects, and any other complicated contraption that json can't encode
165 165 return repr(obj)
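
For example, a short sketch of how these helpers compose (values are illustrative; the decoded text assumes a UTF-8 locale, since json_clean decodes bytes with getdefaultencoding; the import path assumes this module is IPython.utils.jsonutil):

    from datetime import datetime
    from IPython.utils.jsonutil import squash_dates, extract_dates, json_clean

    msg = {'id': 1, 'when': datetime(2011, 11, 30, 12, 0)}
    wire = squash_dates(msg)        # datetime -> ISO8601 string, safe for json.dumps
    back = extract_dates(wire)      # ISO8601 string -> datetime again
    safe = json_clean({'n': (1, 2, 3), 'buf': b'caf\xc3\xa9'})
    # -> {'n': [1, 2, 3], 'buf': u'café'}  (tuple becomes list, bytes are decoded)
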
@@ -1,183 +1,207 b''
1 1 # coding: utf-8
2 2 """Compatibility tricks for Python 3. Mainly to do with unicode."""
3 3 import __builtin__
4 4 import functools
5 5 import sys
6 6 import re
7 7 import types
8 import locale
8 9
9 10 orig_open = open
10 11
11 12 def no_code(x, encoding=None):
12 13 return x
13 14
14 15 # to deal with the possibility of sys.std* not being a stream at all
15 16 def get_stream_enc(stream, default=None):
16 17 if not hasattr(stream, 'encoding') or not stream.encoding:
17 18 return default
18 19 else:
19 20 return stream.encoding
20 21
22 # Less conservative replacement for sys.getdefaultencoding, that will try
23 # to match the environment.
24 # Defined here as a central function, so if we find better choices, we
25 # won't need to make changes all over IPython.
26 def getdefaultencoding():
27 """Return IPython's guess for the default encoding for bytes as text.
28
29     Asks for stdin.encoding first, to match the calling Terminal, but that
30     is often None for subprocesses. Falls back on locale.getpreferredencoding(),
31     which should be a sensible platform default (one that respects the LANG
32     environment variable), and finally on sys.getdefaultencoding(), which is the
33     most conservative option and is usually ASCII.
34 """
35 enc = get_stream_enc(sys.stdin)
36 if not enc or enc=='ascii':
37 try:
38 # There are reports of getpreferredencoding raising errors
39 # in some cases, which may well be fixed, but let's be conservative here.
40 enc = locale.getpreferredencoding()
41 except Exception:
42 pass
43 return enc or sys.getdefaultencoding()
44
21 45 def decode(s, encoding=None):
22 encoding = get_stream_enc(sys.stdin, encoding) or sys.getdefaultencoding()
46 encoding = get_stream_enc(sys.stdin, encoding) or getdefaultencoding()
23 47 return s.decode(encoding, "replace")
24 48
25 49 def encode(u, encoding=None):
26 encoding = get_stream_enc(sys.stdin, encoding) or sys.getdefaultencoding()
50 encoding = get_stream_enc(sys.stdin, encoding) or getdefaultencoding()
27 51 return u.encode(encoding, "replace")
28 52
29 53
30 54 def cast_unicode(s, encoding=None):
31 55 if isinstance(s, bytes):
32 56 return decode(s, encoding)
33 57 return s
34 58
35 59 def cast_bytes(s, encoding=None):
36 60 if not isinstance(s, bytes):
37 61 return encode(s, encoding)
38 62 return s
39 63
40 64 def _modify_str_or_docstring(str_change_func):
41 65 @functools.wraps(str_change_func)
42 66 def wrapper(func_or_str):
43 67 if isinstance(func_or_str, basestring):
44 68 func = None
45 69 doc = func_or_str
46 70 else:
47 71 func = func_or_str
48 72 doc = func.__doc__
49 73
50 74 doc = str_change_func(doc)
51 75
52 76 if func:
53 77 func.__doc__ = doc
54 78 return func
55 79 return doc
56 80 return wrapper
57 81
58 82 if sys.version_info[0] >= 3:
59 83 PY3 = True
60 84
61 85 input = input
62 86 builtin_mod_name = "builtins"
63 87
64 88 str_to_unicode = no_code
65 89 unicode_to_str = no_code
66 90 str_to_bytes = encode
67 91 bytes_to_str = decode
68 92 cast_bytes_py2 = no_code
69 93
70 94 def isidentifier(s, dotted=False):
71 95 if dotted:
72 96 return all(isidentifier(a) for a in s.split("."))
73 97 return s.isidentifier()
74 98
75 99 open = orig_open
76 100
77 101 MethodType = types.MethodType
78 102
79 103 def execfile(fname, glob, loc=None):
80 104 loc = loc if (loc is not None) else glob
81 105 exec compile(open(fname, 'rb').read(), fname, 'exec') in glob, loc
82 106
83 107 # Refactor print statements in doctests.
84 108 _print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE)
85 109 def _print_statement_sub(match):
86 110 expr = match.groups('expr')
87 111 return "print(%s)" % expr
88 112
89 113 @_modify_str_or_docstring
90 114 def doctest_refactor_print(doc):
91 115 """Refactor 'print x' statements in a doctest to print(x) style. 2to3
92 116 unfortunately doesn't pick up on our doctests.
93 117
94 118 Can accept a string or a function, so it can be used as a decorator."""
95 119 return _print_statement_re.sub(_print_statement_sub, doc)
96 120
97 121 # Abstract u'abc' syntax:
98 122 @_modify_str_or_docstring
99 123 def u_format(s):
100 124 """"{u}'abc'" --> "'abc'" (Python 3)
101 125
102 126 Accepts a string or a function, so it can be used as a decorator."""
103 127 return s.format(u='')
104 128
105 129 else:
106 130 PY3 = False
107 131
108 132 input = raw_input
109 133 builtin_mod_name = "__builtin__"
110 134
111 135 str_to_unicode = decode
112 136 unicode_to_str = encode
113 137 str_to_bytes = no_code
114 138 bytes_to_str = no_code
115 139 cast_bytes_py2 = cast_bytes
116 140
117 141 import re
118 142 _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")
119 143 def isidentifier(s, dotted=False):
120 144 if dotted:
121 145 return all(isidentifier(a) for a in s.split("."))
122 146 return bool(_name_re.match(s))
123 147
124 148 class open(object):
125 149 """Wrapper providing key part of Python 3 open() interface."""
126 150 def __init__(self, fname, mode="r", encoding="utf-8"):
127 151 self.f = orig_open(fname, mode)
128 152 self.enc = encoding
129 153
130 154 def write(self, s):
131 155 return self.f.write(s.encode(self.enc))
132 156
133 157 def read(self, size=-1):
134 158 return self.f.read(size).decode(self.enc)
135 159
136 160 def close(self):
137 161 return self.f.close()
138 162
139 163 def __enter__(self):
140 164 return self
141 165
142 166 def __exit__(self, etype, value, traceback):
143 167 self.f.close()
144 168
145 169 def MethodType(func, instance):
146 170 return types.MethodType(func, instance, type(instance))
147 171
148 172 # don't override system execfile on 2.x:
149 173 execfile = execfile
150 174
151 175 def doctest_refactor_print(func_or_str):
152 176 return func_or_str
153 177
154 178
155 179 # Abstract u'abc' syntax:
156 180 @_modify_str_or_docstring
157 181 def u_format(s):
158 182 """"{u}'abc'" --> "u'abc'" (Python 2)
159 183
160 184 Accepts a string or a function, so it can be used as a decorator."""
161 185 return s.format(u='u')
162 186
163 187 if sys.platform == 'win32':
164 188 def execfile(fname, glob=None, loc=None):
165 189 loc = loc if (loc is not None) else glob
166 190 # The rstrip() is necessary b/c trailing whitespace in files will
167 191 # cause an IndentationError in Python 2.6 (this was fixed in 2.7,
168 192 # but we still support 2.6). See issue 1027.
169 193 scripttext = __builtin__.open(fname).read().rstrip() + '\n'
170 194 # compile converts unicode filename to str assuming
171 195 # ascii. Let's do the conversion before calling compile
172 196 if isinstance(fname, unicode):
173 197 filename = unicode_to_str(fname)
174 198 else:
175 199 filename = fname
176 200 exec compile(scripttext, filename, 'exec') in glob, loc
177 201 else:
178 202 def execfile(fname, *where):
179 203 if isinstance(fname, unicode):
180 204 filename = fname.encode(sys.getfilesystemencoding())
181 205 else:
182 206 filename = fname
183 207 __builtin__.execfile(filename, *where)
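
Since getdefaultencoding now lives in py3compat, a typical call site looks like this sketch (the byte string is illustrative; the actual encoding depends on the terminal and locale):

    from IPython.utils import py3compat

    enc = py3compat.getdefaultencoding()     # e.g. 'UTF-8' on most modern systems
    raw = b'caf\xc3\xa9'
    text_value = raw.decode(enc, 'replace')  # what callers such as _system_body do
    same = py3compat.cast_unicode(raw)       # convenience wrapper using the same guess
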
@@ -1,760 +1,736 b''
1 1 # encoding: utf-8
2 2 """
3 3 Utilities for working with strings and text.
4 4 """
5 5
6 6 #-----------------------------------------------------------------------------
7 7 # Copyright (C) 2008-2011 The IPython Development Team
8 8 #
9 9 # Distributed under the terms of the BSD License. The full license is in
10 10 # the file COPYING, distributed as part of this software.
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Imports
15 15 #-----------------------------------------------------------------------------
16 16
17 17 import __main__
18 18
19 import locale
20 19 import os
21 20 import re
22 21 import shutil
23 22 import sys
24 23 import textwrap
25 24 from string import Formatter
26 25
27 26 from IPython.external.path import path
28 27 from IPython.testing.skipdoctest import skip_doctest_py3
29 28 from IPython.utils import py3compat
30 29 from IPython.utils.io import nlprint
31 30 from IPython.utils.data import flatten
32 31
33 32 #-----------------------------------------------------------------------------
34 33 # Code
35 34 #-----------------------------------------------------------------------------
36 35
37 # Less conservative replacement for sys.getdefaultencoding, that will try
38 # to match the environment.
39 # Defined here as central function, so if we find better choices, we
40 # won't need to make changes all over IPython.
41 def getdefaultencoding():
42 """Return IPython's guess for the default encoding for bytes as text.
43
44 Asks for stdin.encoding first, to match the calling Terminal, but that
45 is often None for subprocesses. Fall back on locale.getpreferredencoding()
46 which should be a sensible platform default (that respects LANG environment),
47 and finally to sys.getdefaultencoding() which is the most conservative option,
48 and usually ASCII.
49 """
50 enc = py3compat.get_stream_enc(sys.stdin)
51 if not enc or enc=='ascii':
52 try:
53 # There are reports of getpreferredencoding raising errors
54 # in some cases, which may well be fixed, but let's be conservative here.
55 enc = locale.getpreferredencoding()
56 except Exception:
57 pass
58 return enc or sys.getdefaultencoding()
59
60 36 def unquote_ends(istr):
61 37 """Remove a single pair of quotes from the endpoints of a string."""
62 38
63 39 if not istr:
64 40 return istr
65 41 if (istr[0]=="'" and istr[-1]=="'") or \
66 42 (istr[0]=='"' and istr[-1]=='"'):
67 43 return istr[1:-1]
68 44 else:
69 45 return istr
70 46
71 47
72 48 class LSString(str):
73 49 """String derivative with a special access attributes.
74 50
75 51 These are normal strings, but with the special attributes:
76 52
77 53 .l (or .list) : value as list (split on newlines).
78 54 .n (or .nlstr): original value (the string itself).
79 55 .s (or .spstr): value as whitespace-separated string.
80 56 .p (or .paths): list of path objects
81 57
82 58 Any values which require transformations are computed only once and
83 59 cached.
84 60
85 61 Such strings are very useful to efficiently interact with the shell, which
86 62 typically only understands whitespace-separated options for commands."""
87 63
88 64 def get_list(self):
89 65 try:
90 66 return self.__list
91 67 except AttributeError:
92 68 self.__list = self.split('\n')
93 69 return self.__list
94 70
95 71 l = list = property(get_list)
96 72
97 73 def get_spstr(self):
98 74 try:
99 75 return self.__spstr
100 76 except AttributeError:
101 77 self.__spstr = self.replace('\n',' ')
102 78 return self.__spstr
103 79
104 80 s = spstr = property(get_spstr)
105 81
106 82 def get_nlstr(self):
107 83 return self
108 84
109 85 n = nlstr = property(get_nlstr)
110 86
111 87 def get_paths(self):
112 88 try:
113 89 return self.__paths
114 90 except AttributeError:
115 91 self.__paths = [path(p) for p in self.split('\n') if os.path.exists(p)]
116 92 return self.__paths
117 93
118 94 p = paths = property(get_paths)
119 95
120 96 # FIXME: We need to reimplement type specific displayhook and then add this
121 97 # back as a custom printer. This should also be moved outside utils into the
122 98 # core.
123 99
124 100 # def print_lsstring(arg):
125 101 # """ Prettier (non-repr-like) and more informative printer for LSString """
126 102 # print "LSString (.p, .n, .l, .s available). Value:"
127 103 # print arg
128 104 #
129 105 #
130 106 # print_lsstring = result_display.when_type(LSString)(print_lsstring)
131 107
132 108
133 109 class SList(list):
134 110 """List derivative with a special access attributes.
135 111
136 112 These are normal lists, but with the special attributes:
137 113
138 114 .l (or .list) : value as list (the list itself).
139 115 .n (or .nlstr): value as a string, joined on newlines.
140 116 .s (or .spstr): value as a string, joined on spaces.
141 117 .p (or .paths): list of path objects
142 118
143 119 Any values which require transformations are computed only once and
144 120 cached."""
145 121
146 122 def get_list(self):
147 123 return self
148 124
149 125 l = list = property(get_list)
150 126
151 127 def get_spstr(self):
152 128 try:
153 129 return self.__spstr
154 130 except AttributeError:
155 131 self.__spstr = ' '.join(self)
156 132 return self.__spstr
157 133
158 134 s = spstr = property(get_spstr)
159 135
160 136 def get_nlstr(self):
161 137 try:
162 138 return self.__nlstr
163 139 except AttributeError:
164 140 self.__nlstr = '\n'.join(self)
165 141 return self.__nlstr
166 142
167 143 n = nlstr = property(get_nlstr)
168 144
169 145 def get_paths(self):
170 146 try:
171 147 return self.__paths
172 148 except AttributeError:
173 149 self.__paths = [path(p) for p in self if os.path.exists(p)]
174 150 return self.__paths
175 151
176 152 p = paths = property(get_paths)
177 153
178 154 def grep(self, pattern, prune = False, field = None):
179 155 """ Return all strings matching 'pattern' (a regex or callable)
180 156
181 157 This is case-insensitive. If prune is true, return all items
182 158 NOT matching the pattern.
183 159
184 160 If field is specified, the match must occur in the specified
185 161 whitespace-separated field.
186 162
187 163 Examples::
188 164
189 165 a.grep( lambda x: x.startswith('C') )
190 166 a.grep('Cha.*log', prune=1)
191 167 a.grep('chm', field=-1)
192 168 """
193 169
194 170 def match_target(s):
195 171 if field is None:
196 172 return s
197 173 parts = s.split()
198 174 try:
199 175 tgt = parts[field]
200 176 return tgt
201 177 except IndexError:
202 178 return ""
203 179
204 180 if isinstance(pattern, basestring):
205 181 pred = lambda x : re.search(pattern, x, re.IGNORECASE)
206 182 else:
207 183 pred = pattern
208 184 if not prune:
209 185 return SList([el for el in self if pred(match_target(el))])
210 186 else:
211 187 return SList([el for el in self if not pred(match_target(el))])
212 188
213 189 def fields(self, *fields):
214 190 """ Collect whitespace-separated fields from string list
215 191
216 192 Allows quick awk-like usage of string lists.
217 193
218 194 Example data (in var a, created by 'a = !ls -l')::
219 195 -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
220 196 drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
221 197
222 198 a.fields(0) is ['-rwxrwxrwx', 'drwxrwxrwx+']
223 199 a.fields(1,0) is ['1 -rwxrwxrwx', '6 drwxrwxrwx+']
224 200 (note the joining by space).
225 201 a.fields(-1) is ['ChangeLog', 'IPython']
226 202
227 203 IndexErrors are ignored.
228 204
229 205 Without args, fields() just split()'s the strings.
230 206 """
231 207 if len(fields) == 0:
232 208 return [el.split() for el in self]
233 209
234 210 res = SList()
235 211 for el in [f.split() for f in self]:
236 212 lineparts = []
237 213
238 214 for fd in fields:
239 215 try:
240 216 lineparts.append(el[fd])
241 217 except IndexError:
242 218 pass
243 219 if lineparts:
244 220 res.append(" ".join(lineparts))
245 221
246 222 return res
247 223
248 224 def sort(self,field= None, nums = False):
249 225 """ sort by specified fields (see fields())
250 226
251 227 Example::
252 228 a.sort(1, nums = True)
253 229
254 230 Sorts a by second field, in numerical order (so that 21 > 3)
255 231
256 232 """
257 233
258 234 #decorate, sort, undecorate
259 235 if field is not None:
260 236 dsu = [[SList([line]).fields(field), line] for line in self]
261 237 else:
262 238 dsu = [[line, line] for line in self]
263 239 if nums:
264 240 for i in range(len(dsu)):
265 241 numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
266 242 try:
267 243 n = int(numstr)
268 244 except ValueError:
269 245 n = 0;
270 246 dsu[i][0] = n
271 247
272 248
273 249 dsu.sort()
274 250 return SList([t[1] for t in dsu])
275 251
276 252
277 253 # FIXME: We need to reimplement type specific displayhook and then add this
278 254 # back as a custom printer. This should also be moved outside utils into the
279 255 # core.
280 256
281 257 # def print_slist(arg):
282 258 # """ Prettier (non-repr-like) and more informative printer for SList """
283 259 # print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
284 260 # if hasattr(arg, 'hideonce') and arg.hideonce:
285 261 # arg.hideonce = False
286 262 # return
287 263 #
288 264 # nlprint(arg)
289 265 #
290 266 # print_slist = result_display.when_type(SList)(print_slist)
291 267
292 268
293 269 def esc_quotes(strng):
294 270 """Return the input string with single and double quotes escaped out"""
295 271
296 272 return strng.replace('"','\\"').replace("'","\\'")
297 273
298 274
299 275 def qw(words,flat=0,sep=None,maxsplit=-1):
300 276 """Similar to Perl's qw() operator, but with some more options.
301 277
302 278 qw(words,flat=0,sep=' ',maxsplit=-1) -> words.split(sep,maxsplit)
303 279
304 280 words can also be a list itself, and with flat=1, the output will be
305 281 recursively flattened.
306 282
307 283 Examples:
308 284
309 285 >>> qw('1 2')
310 286 ['1', '2']
311 287
312 288 >>> qw(['a b','1 2',['m n','p q']])
313 289 [['a', 'b'], ['1', '2'], [['m', 'n'], ['p', 'q']]]
314 290
315 291 >>> qw(['a b','1 2',['m n','p q']],flat=1)
316 292 ['a', 'b', '1', '2', 'm', 'n', 'p', 'q']
317 293 """
318 294
319 295 if isinstance(words, basestring):
320 296 return [word.strip() for word in words.split(sep,maxsplit)
321 297 if word and not word.isspace() ]
322 298 if flat:
323 299 return flatten(map(qw,words,[1]*len(words)))
324 300 return map(qw,words)
325 301
326 302
327 303 def qwflat(words,sep=None,maxsplit=-1):
328 304 """Calls qw(words) in flat mode. It's just a convenient shorthand."""
329 305 return qw(words,1,sep,maxsplit)
330 306
331 307
332 308 def qw_lol(indata):
333 309 """qw_lol('a b') -> [['a','b']],
334 310 otherwise it's just a call to qw().
335 311
336 312 We need this to make sure the modules_some keys *always* end up as a
337 313 list of lists."""
338 314
339 315 if isinstance(indata, basestring):
340 316 return [qw(indata)]
341 317 else:
342 318 return qw(indata)
343 319
344 320
345 321 def grep(pat,list,case=1):
346 322 """Simple minded grep-like function.
347 323 grep(pat,list) returns occurrences of pat in list, None on failure.
348 324
349 325 It only does simple string matching, with no support for regexps. Use the
350 326 option case=0 for case-insensitive matching."""
351 327
352 328 # This is pretty crude. At least it should implement copying only references
353 329 # to the original data in case it's big. Now it copies the data for output.
354 330 out=[]
355 331 if case:
356 332 for term in list:
357 333 if term.find(pat)>-1: out.append(term)
358 334 else:
359 335 lpat=pat.lower()
360 336 for term in list:
361 337 if term.lower().find(lpat)>-1: out.append(term)
362 338
363 339 if len(out): return out
364 340 else: return None
365 341
366 342
367 343 def dgrep(pat,*opts):
368 344 """Return grep() on dir()+dir(__builtins__).
369 345
370 346 A very common use of grep() when working interactively."""
371 347
372 348 return grep(pat,dir(__main__)+dir(__main__.__builtins__),*opts)
373 349
374 350
375 351 def idgrep(pat):
376 352 """Case-insensitive dgrep()"""
377 353
378 354 return dgrep(pat,0)
379 355
380 356
381 357 def igrep(pat,list):
382 358 """Synonym for case-insensitive grep."""
383 359
384 360 return grep(pat,list,case=0)
385 361
386 362
387 363 def indent(instr,nspaces=4, ntabs=0, flatten=False):
388 364 """Indent a string a given number of spaces or tabstops.
389 365
390 366 indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
391 367
392 368 Parameters
393 369 ----------
394 370
395 371 instr : basestring
396 372 The string to be indented.
397 373 nspaces : int (default: 4)
398 374 The number of spaces to be indented.
399 375 ntabs : int (default: 0)
400 376 The number of tabs to be indented.
401 377 flatten : bool (default: False)
402 378 Whether to scrub existing indentation. If True, all lines will be
403 379 aligned to the same indentation. If False, existing indentation will
404 380 be strictly increased.
405 381
406 382 Returns
407 383 -------
408 384
409 385 str|unicode : string indented by ntabs and nspaces.
410 386
411 387 """
412 388 if instr is None:
413 389 return
414 390 ind = '\t'*ntabs+' '*nspaces
415 391 if flatten:
416 392 pat = re.compile(r'^\s*', re.MULTILINE)
417 393 else:
418 394 pat = re.compile(r'^', re.MULTILINE)
419 395 outstr = re.sub(pat, ind, instr)
420 396 if outstr.endswith(os.linesep+ind):
421 397 return outstr[:-len(ind)]
422 398 else:
423 399 return outstr
424 400
425 401 def native_line_ends(filename,backup=1):
426 402 """Convert (in-place) a file to line-ends native to the current OS.
427 403
428 404 If the optional backup argument is given as false, no backup of the
429 405 original file is left. """
430 406
431 407 backup_suffixes = {'posix':'~','dos':'.bak','nt':'.bak','mac':'.bak'}
432 408
433 409 bak_filename = filename + backup_suffixes[os.name]
434 410
435 411 original = open(filename).read()
436 412 shutil.copy2(filename,bak_filename)
437 413 try:
438 414 new = open(filename,'wb')
439 415 new.write(os.linesep.join(original.splitlines()))
440 416 new.write(os.linesep) # ALWAYS put an eol at the end of the file
441 417 new.close()
442 418 except:
443 419 os.rename(bak_filename,filename)
444 420 if not backup:
445 421 try:
446 422 os.remove(bak_filename)
447 423 except:
448 424 pass
449 425
450 426
451 427 def list_strings(arg):
452 428 """Always return a list of strings, given a string or list of strings
453 429 as input.
454 430
455 431 :Examples:
456 432
457 433 In [7]: list_strings('A single string')
458 434 Out[7]: ['A single string']
459 435
460 436 In [8]: list_strings(['A single string in a list'])
461 437 Out[8]: ['A single string in a list']
462 438
463 439 In [9]: list_strings(['A','list','of','strings'])
464 440 Out[9]: ['A', 'list', 'of', 'strings']
465 441 """
466 442
467 443 if isinstance(arg,basestring): return [arg]
468 444 else: return arg
469 445
470 446
471 447 def marquee(txt='',width=78,mark='*'):
472 448 """Return the input string centered in a 'marquee'.
473 449
474 450 :Examples:
475 451
476 452 In [16]: marquee('A test',40)
477 453 Out[16]: '**************** A test ****************'
478 454
479 455 In [17]: marquee('A test',40,'-')
480 456 Out[17]: '---------------- A test ----------------'
481 457
482 458 In [18]: marquee('A test',40,' ')
483 459 Out[18]: ' A test '
484 460
485 461 """
486 462 if not txt:
487 463 return (mark*width)[:width]
488 464 nmark = (width-len(txt)-2)//len(mark)//2
489 465 if nmark < 0: nmark =0
490 466 marks = mark*nmark
491 467 return '%s %s %s' % (marks,txt,marks)
492 468
493 469
494 470 ini_spaces_re = re.compile(r'^(\s+)')
495 471
496 472 def num_ini_spaces(strng):
497 473 """Return the number of initial spaces in a string"""
498 474
499 475 ini_spaces = ini_spaces_re.match(strng)
500 476 if ini_spaces:
501 477 return ini_spaces.end()
502 478 else:
503 479 return 0
504 480
505 481
506 482 def format_screen(strng):
507 483 """Format a string for screen printing.
508 484
509 485 This removes some latex-type format codes."""
510 486 # Paragraph continue
511 487 par_re = re.compile(r'\\$',re.MULTILINE)
512 488 strng = par_re.sub('',strng)
513 489 return strng
514 490
515 491 def dedent(text):
516 492 """Equivalent of textwrap.dedent that ignores unindented first line.
517 493
518 494 This means it will still dedent strings like:
519 495 '''foo
520 496 is a bar
521 497 '''
522 498
523 499 For use in wrap_paragraphs.
524 500 """
525 501
526 502 if text.startswith('\n'):
527 503 # text starts with blank line, don't ignore the first line
528 504 return textwrap.dedent(text)
529 505
530 506 # split first line
531 507 splits = text.split('\n',1)
532 508 if len(splits) == 1:
533 509 # only one line
534 510 return textwrap.dedent(text)
535 511
536 512 first, rest = splits
537 513 # dedent everything but the first line
538 514 rest = textwrap.dedent(rest)
539 515 return '\n'.join([first, rest])
540 516
541 517 def wrap_paragraphs(text, ncols=80):
542 518 """Wrap multiple paragraphs to fit a specified width.
543 519
544 520 This is equivalent to textwrap.wrap, but with support for multiple
545 521 paragraphs, as separated by empty lines.
546 522
547 523 Returns
548 524 -------
549 525
550 526 list of complete paragraphs, wrapped to fill `ncols` columns.
551 527 """
552 528 paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE)
553 529 text = dedent(text).strip()
554 530 paragraphs = paragraph_re.split(text)[::2] # every other entry is space
555 531 out_ps = []
556 532 indent_re = re.compile(r'\n\s+', re.MULTILINE)
557 533 for p in paragraphs:
558 534 # presume indentation that survives dedent is meaningful formatting,
559 535 # so don't fill unless text is flush.
560 536 if indent_re.search(p) is None:
561 537 # wrap paragraph
562 538 p = textwrap.fill(p, ncols)
563 539 out_ps.append(p)
564 540 return out_ps
565 541
566 542
567 543 class EvalFormatter(Formatter):
568 544 """A String Formatter that allows evaluation of simple expressions.
569 545
570 546 Note that this version interprets a : as specifying a format string (as per
571 547 standard string formatting), so if slicing is required, you must explicitly
572 548 create a slice.
573 549
574 550 This is to be used in templating cases, such as the parallel batch
575 551 script templates, where simple arithmetic on arguments is useful.
576 552
577 553 Examples
578 554 --------
579 555
580 556 In [1]: f = EvalFormatter()
581 557 In [2]: f.format('{n//4}', n=8)
582 558 Out [2]: '2'
583 559
584 560 In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello")
585 561 Out [3]: 'll'
586 562 """
587 563 def get_field(self, name, args, kwargs):
588 564 v = eval(name, kwargs)
589 565 return v, name
590 566
591 567 @skip_doctest_py3
592 568 class FullEvalFormatter(Formatter):
593 569 """A String Formatter that allows evaluation of simple expressions.
594 570
595 571 Any time a format key is not found in the kwargs,
596 572 it will be tried as an expression in the kwargs namespace.
597 573
598 574 Note that this version allows slicing using [1:2], so you cannot specify
599 575 a format string. Use :class:`EvalFormatter` to permit format strings.
600 576
601 577 Examples
602 578 --------
603 579
604 580 In [1]: f = FullEvalFormatter()
605 581 In [2]: f.format('{n//4}', n=8)
606 582 Out[2]: u'2'
607 583
608 584 In [3]: f.format('{list(range(5))[2:4]}')
609 585 Out[3]: u'[2, 3]'
610 586
611 587 In [4]: f.format('{3*2}')
612 588 Out[4]: u'6'
613 589 """
614 590 # copied from Formatter._vformat with minor changes to allow eval
615 591 # and replace the format_spec code with slicing
616 592 def _vformat(self, format_string, args, kwargs, used_args, recursion_depth):
617 593 if recursion_depth < 0:
618 594 raise ValueError('Max string recursion exceeded')
619 595 result = []
620 596 for literal_text, field_name, format_spec, conversion in \
621 597 self.parse(format_string):
622 598
623 599 # output the literal text
624 600 if literal_text:
625 601 result.append(literal_text)
626 602
627 603 # if there's a field, output it
628 604 if field_name is not None:
629 605 # this is some markup, find the object and do
630 606 # the formatting
631 607
632 608 if format_spec:
633 609 # override format spec, to allow slicing:
634 610 field_name = ':'.join([field_name, format_spec])
635 611
636 612 # eval the contents of the field for the object
637 613 # to be formatted
638 614 obj = eval(field_name, kwargs)
639 615
640 616 # do any conversion on the resulting object
641 617 obj = self.convert_field(obj, conversion)
642 618
643 619 # format the object and append to the result
644 620 result.append(self.format_field(obj, ''))
645 621
646 622 return u''.join(py3compat.cast_unicode(s) for s in result)
647 623
648 624 @skip_doctest_py3
649 625 class DollarFormatter(FullEvalFormatter):
650 626 """Formatter allowing Itpl style $foo replacement, for names and attribute
651 627 access only. Standard {foo} replacement also works, and allows full
652 628 evaluation of its arguments.
653 629
654 630 Examples
655 631 --------
656 632 In [1]: f = DollarFormatter()
657 633 In [2]: f.format('{n//4}', n=8)
658 634 Out[2]: u'2'
659 635
660 636 In [3]: f.format('23 * 76 is $result', result=23*76)
661 637 Out[3]: u'23 * 76 is 1748'
662 638
663 639 In [4]: f.format('$a or {b}', a=1, b=2)
664 640 Out[4]: u'1 or 2'
665 641 """
666 642 _dollar_pattern = re.compile("(.*?)\$(\$?[\w\.]+)")
667 643 def parse(self, fmt_string):
668 644 for literal_txt, field_name, format_spec, conversion \
669 645 in Formatter.parse(self, fmt_string):
670 646
671 647 # Find $foo patterns in the literal text.
672 648 continue_from = 0
673 649 txt = ""
674 650 for m in self._dollar_pattern.finditer(literal_txt):
675 651 new_txt, new_field = m.group(1,2)
676 652 # $$foo --> $foo
677 653 if new_field.startswith("$"):
678 654 txt += new_txt + new_field
679 655 else:
680 656 yield (txt + new_txt, new_field, "", None)
681 657 txt = ""
682 658 continue_from = m.end()
683 659
684 660 # Re-yield the {foo} style pattern
685 661 yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion)
686 662
687 663
688 664 def columnize(items, separator=' ', displaywidth=80):
689 665 """ Transform a list of strings into a single string with columns.
690 666
691 667 Parameters
692 668 ----------
693 669 items : sequence of strings
694 670 The strings to process.
695 671
696 672 separator : str, optional [default is two spaces]
697 673 The string that separates columns.
698 674
699 675 displaywidth : int, optional [default is 80]
700 676 Width of the display in number of characters.
701 677
702 678 Returns
703 679 -------
704 680 The formatted string.
705 681 """
706 682 # Note: this code is adapted from columnize 0.3.2.
707 683 # See http://code.google.com/p/pycolumnize/
708 684
709 685 # Some degenerate cases.
710 686 size = len(items)
711 687 if size == 0:
712 688 return '\n'
713 689 elif size == 1:
714 690 return '%s\n' % items[0]
715 691
716 692 # Special case: if any item is longer than the maximum width, there's no
717 693 # point in triggering the logic below...
718 694 item_len = map(len, items) # save these, we can reuse them below
719 695 longest = max(item_len)
720 696 if longest >= displaywidth:
721 697 return '\n'.join(items+[''])
722 698
723 699 # Try every row count from 1 upwards
724 700 array_index = lambda nrows, row, col: nrows*col + row
725 701 for nrows in range(1, size):
726 702 ncols = (size + nrows - 1) // nrows
727 703 colwidths = []
728 704 totwidth = -len(separator)
729 705 for col in range(ncols):
730 706 # Get max column width for this column
731 707 colwidth = 0
732 708 for row in range(nrows):
733 709 i = array_index(nrows, row, col)
734 710 if i >= size: break
735 711 x, len_x = items[i], item_len[i]
736 712 colwidth = max(colwidth, len_x)
737 713 colwidths.append(colwidth)
738 714 totwidth += colwidth + len(separator)
739 715 if totwidth > displaywidth:
740 716 break
741 717 if totwidth <= displaywidth:
742 718 break
743 719
744 720 # The smallest number of rows computed and the max widths for each
745 721 # column has been obtained. Now we just have to format each of the rows.
746 722 string = ''
747 723 for row in range(nrows):
748 724 texts = []
749 725 for col in range(ncols):
750 726 i = row + nrows*col
751 727 if i >= size:
752 728 texts.append('')
753 729 else:
754 730 texts.append(items[i])
755 731 while texts and not texts[-1]:
756 732 del texts[-1]
757 733 for col in range(len(texts)):
758 734 texts[col] = texts[col].ljust(colwidths[col])
759 735 string += '%s\n' % separator.join(texts)
760 736 return string
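
As a quick illustration of columnize above (the exact spacing depends on the separator and display width; the import path assumes this module is IPython.utils.text):

    from IPython.utils.text import columnize

    names = ['alpha', 'beta', 'gamma', 'delta', 'epsilon', 'zeta']
    print(columnize(names, displaywidth=30))
    # roughly:
    # alpha gamma epsilon
    # beta  delta zeta
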
@@ -1,91 +1,91 b''
1 1 import sys
2 2 import time
3 3 from io import StringIO
4 4
5 5 from session import extract_header, Message
6 6
7 from IPython.utils import io, text
7 from IPython.utils import io, text, py3compat
8 8
9 9 #-----------------------------------------------------------------------------
10 10 # Globals
11 11 #-----------------------------------------------------------------------------
12 12
13 13 #-----------------------------------------------------------------------------
14 14 # Stream classes
15 15 #-----------------------------------------------------------------------------
16 16
17 17 class OutStream(object):
18 18 """A file like object that publishes the stream to a 0MQ PUB socket."""
19 19
20 20 # The time interval between automatic flushes, in seconds.
21 21 flush_interval = 0.05
22 22 topic=None
23 23
24 24 def __init__(self, session, pub_socket, name):
25 25 self.session = session
26 26 self.pub_socket = pub_socket
27 27 self.name = name
28 28 self.parent_header = {}
29 29 self._new_buffer()
30 30
31 31 def set_parent(self, parent):
32 32 self.parent_header = extract_header(parent)
33 33
34 34 def close(self):
35 35 self.pub_socket = None
36 36
37 37 def flush(self):
38 38 #io.rprint('>>>flushing output buffer: %s<<<' % self.name) # dbg
39 39 if self.pub_socket is None:
40 40 raise ValueError(u'I/O operation on closed file')
41 41 else:
42 42 data = self._buffer.getvalue()
43 43 if data:
44 44 content = {u'name':self.name, u'data':data}
45 45 msg = self.session.send(self.pub_socket, u'stream', content=content,
46 46 parent=self.parent_header, ident=self.topic)
47 47
48 48 if hasattr(self.pub_socket, 'flush'):
49 49 # socket itself has flush (presumably ZMQStream)
50 50 self.pub_socket.flush()
51 51 self._buffer.close()
52 52 self._new_buffer()
53 53
54 54 def isatty(self):
55 55 return False
56 56
57 57 def next(self):
58 58 raise IOError('Read not supported on a write only stream.')
59 59
60 60 def read(self, size=-1):
61 61 raise IOError('Read not supported on a write only stream.')
62 62
63 63 def readline(self, size=-1):
64 64 raise IOError('Read not supported on a write only stream.')
65 65
66 66 def write(self, string):
67 67 if self.pub_socket is None:
68 68 raise ValueError('I/O operation on closed file')
69 69 else:
70 70 # Make sure that we're handling unicode
71 71 if not isinstance(string, unicode):
72 enc = text.getdefaultencoding()
72 enc = py3compat.getdefaultencoding()
73 73 string = string.decode(enc, 'replace')
74 74
75 75 self._buffer.write(string)
76 76 current_time = time.time()
77 77 if self._start <= 0:
78 78 self._start = current_time
79 79 elif current_time - self._start > self.flush_interval:
80 80 self.flush()
81 81
82 82 def writelines(self, sequence):
83 83 if self.pub_socket is None:
84 84 raise ValueError('I/O operation on closed file')
85 85 else:
86 86 for string in sequence:
87 87 self.write(string)
88 88
89 89 def _new_buffer(self):
90 90 self._buffer = StringIO()
91 91 self._start = -1
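
A rough sketch of how a kernel is expected to wire up the OutStream defined above (names are illustrative; creating the session and the PUB socket is elided):

    # inside kernel startup, once `session` and `pub_socket` exist:
    sys.stdout = OutStream(session, pub_socket, u'stdout')
    sys.stderr = OutStream(session, pub_socket, u'stderr')
    print('hello')        # buffered, then published as a 'stream' message
    sys.stdout.flush()    # force the buffered text out immediately
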