The requested changes are too big and content was truncated.
@@ -1,6 +1,8 b'' | |||
|
1 | 1 | docs/dist |
|
2 | 2 | docs/build/* |
|
3 | 3 | docs/source/api/generated |
|
4 | 4 | *.pyc |
|
5 | 5 | build |
|
6 | 6 | *.egg-info |
|
7 | *.py~ | |
|
8 | *.bak |
@@ -1,54 +1,54 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # encoding: utf-8 |
|
3 | 3 | """ |
|
4 | 4 | IPython. |
|
5 | 5 | |
|
6 | 6 | IPython is a set of tools for interactive and exploratory computing in Python. |
|
7 | 7 | """ |
|
8 | 8 | #----------------------------------------------------------------------------- |
|
9 | 9 | # Copyright (C) 2008-2009 The IPython Development Team |
|
10 | 10 | # |
|
11 | 11 | # Distributed under the terms of the BSD License. The full license is in |
|
12 | 12 | # the file COPYING, distributed as part of this software. |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | # Imports |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | from __future__ import absolute_import |
|
19 | 19 | |
|
20 | 20 | import os |
|
21 | 21 | import sys |
|
22 | 22 | |
|
23 | 23 | #----------------------------------------------------------------------------- |
|
24 | 24 | # Setup everything |
|
25 | 25 | #----------------------------------------------------------------------------- |
|
26 | 26 | |
|
27 | 27 | if sys.version[0:3] < '2.6': |
|
28 | 28 | raise ImportError('Python Version 2.6 or above is required for IPython.') |
|
29 | 29 | |
|
30 | 30 | |
|
31 | 31 | # Make it easy to import extensions - they are always directly on pythonpath. |
|
32 | 32 | # Therefore, non-IPython modules can be added to extensions directory. |
|
33 | 33 | # This should probably be in ipapp.py. |
|
34 | 34 | sys.path.append(os.path.join(os.path.dirname(__file__), "extensions")) |
|
35 | 35 | |
|
36 | 36 | #----------------------------------------------------------------------------- |
|
37 | 37 | # Setup the top level names |
|
38 | 38 | #----------------------------------------------------------------------------- |
|
39 | 39 | |
|
40 | 40 | from .config.loader import Config |
|
41 | 41 | from .core import release |
|
42 | 42 | from .core.application import Application |
|
43 | 43 | from .frontend.terminal.embed import embed |
|
44 | 44 | from .core.error import TryNext |
|
45 | 45 | from .core.interactiveshell import InteractiveShell |
|
46 | 46 | from .testing import test |
|
47 | 47 | |
|
48 | 48 | # Release data |
|
49 | 49 | __author__ = '' |
|
50 | for author, email in release.authors.values(): | |
|
50 | for author, email in release.authors.itervalues(): | |
|
51 | 51 | __author__ += author + ' <' + email + '>\n' |
|
52 | 52 | __license__ = release.license |
|
53 | 53 | __version__ = release.version |
|
54 | 54 | __revision__ = release.revision |
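The change in this hunk swaps release.authors.values() for itervalues(), the Python 2 dict method that yields values lazily instead of building a throwaway list. A minimal Python 2 sketch of the same loop, using an illustrative authors dict (the real keys and addresses in release.authors are not shown in the hunk):

    # Python 2: itervalues() iterates over the values without building a list.
    authors = {'jane': ('Jane Doe', 'jane@example.com'),
               'juan': ('Juan Perez', 'juan@example.com')}

    __author__ = ''
    for author, email in authors.itervalues():
        __author__ += author + ' <' + email + '>\n'
    print __author__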
@@ -1,139 +1,139 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # encoding: utf-8 |
|
3 | 3 | """ |
|
4 | 4 | A base class for objects that are configurable. |
|
5 | 5 | |
|
6 | 6 | Authors: |
|
7 | 7 | |
|
8 | 8 | * Brian Granger |
|
9 | 9 | * Fernando Perez |
|
10 | 10 | """ |
|
11 | 11 | |
|
12 | 12 | #----------------------------------------------------------------------------- |
|
13 | 13 | # Copyright (C) 2008-2010 The IPython Development Team |
|
14 | 14 | # |
|
15 | 15 | # Distributed under the terms of the BSD License. The full license is in |
|
16 | 16 | # the file COPYING, distributed as part of this software. |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | |
|
19 | 19 | #----------------------------------------------------------------------------- |
|
20 | 20 | # Imports |
|
21 | 21 | #----------------------------------------------------------------------------- |
|
22 | 22 | |
|
23 | 23 | from copy import deepcopy |
|
24 | 24 | import datetime |
|
25 | 25 | from weakref import WeakValueDictionary |
|
26 | 26 | |
|
27 | 27 | from IPython.utils.importstring import import_item |
|
28 | 28 | from loader import Config |
|
29 | 29 | from IPython.utils.traitlets import HasTraits, Instance |
|
30 | 30 | |
|
31 | 31 | |
|
32 | 32 | #----------------------------------------------------------------------------- |
|
33 | 33 | # Helper classes for Configurables |
|
34 | 34 | #----------------------------------------------------------------------------- |
|
35 | 35 | |
|
36 | 36 | |
|
37 | 37 | class ConfigurableError(Exception): |
|
38 | 38 | pass |
|
39 | 39 | |
|
40 | 40 | |
|
41 | 41 | #----------------------------------------------------------------------------- |
|
42 | 42 | # Configurable implementation |
|
43 | 43 | #----------------------------------------------------------------------------- |
|
44 | 44 | |
|
45 | 45 | |
|
46 | 46 | class Configurable(HasTraits): |
|
47 | 47 | |
|
48 | 48 | config = Instance(Config,(),{}) |
|
49 | 49 | created = None |
|
50 | 50 | |
|
51 | 51 | def __init__(self, **kwargs): |
|
52 | 52 | """Create a configurable given a config. |
|
53 | 53 | |
|
54 | 54 | Parameters |
|
55 | 55 | ---------- |
|
56 | 56 | config : Config |
|
57 | 57 | If this is empty, default values are used. If config is a |
|
58 | 58 | :class:`Config` instance, it will be used to configure the |
|
59 | 59 | instance. |
|
60 | 60 | |
|
61 | 61 | Notes |
|
62 | 62 | ----- |
|
63 | 63 | Subclasses of Configurable must call the :meth:`__init__` method of |
|
64 | 64 | :class:`Configurable` *before* doing anything else and using |
|
65 | 65 | :func:`super`:: |
|
66 | 66 | |
|
67 | 67 | class MyConfigurable(Configurable): |
|
68 | 68 | def __init__(self, config=None): |
|
69 | 69 | super(MyConfigurable, self).__init__(config) |
|
70 | 70 | # Then any other code you need to finish initialization. |
|
71 | 71 | |
|
72 | 72 | This ensures that instances will be configured properly. |
|
73 | 73 | """ |
|
74 | 74 | config = kwargs.pop('config', None) |
|
75 | 75 | if config is not None: |
|
76 | 76 | # We used to deepcopy, but for now we are trying to just save |
|
77 | 77 | # by reference. This *could* have side effects as all components |
|
78 | 78 | # will share config. In fact, I did find such a side effect in |
|
79 | 79 | # _config_changed below. If a config attribute value was a mutable type |
|
80 | 80 | # all instances of a component were getting the same copy, effectively |
|
81 | 81 | # making that a class attribute. |
|
82 | 82 | # self.config = deepcopy(config) |
|
83 | 83 | self.config = config |
|
84 | 84 | # This should go second so individual keyword arguments override |
|
85 | 85 | # the values in config. |
|
86 | 86 | super(Configurable, self).__init__(**kwargs) |
|
87 | 87 | self.created = datetime.datetime.now() |
|
88 | 88 | |
|
89 | 89 | #------------------------------------------------------------------------- |
|
90 | 90 | # Static trait notifications |
|
91 | 91 | #------------------------------------------------------------------------- |
|
92 | 92 | |
|
93 | 93 | def _config_changed(self, name, old, new): |
|
94 | 94 | """Update all the class traits having ``config=True`` as metadata. |
|
95 | 95 | |
|
96 | 96 | For any class trait with a ``config`` metadata attribute that is |
|
97 | 97 | ``True``, we update the trait with the value of the corresponding |
|
98 | 98 | config entry. |
|
99 | 99 | """ |
|
100 | 100 | # Get all traits with a config metadata entry that is True |
|
101 | 101 | traits = self.traits(config=True) |
|
102 | 102 | |
|
103 | 103 | # We auto-load config section for this class as well as any parent |
|
104 | 104 | # classes that are Configurable subclasses. This starts with Configurable |
|
105 | 105 | # and works down the mro loading the config for each section. |
|
106 | 106 | section_names = [cls.__name__ for cls in \ |
|
107 | 107 | reversed(self.__class__.__mro__) if |
|
108 | 108 | issubclass(cls, Configurable) and issubclass(self.__class__, cls)] |
|
109 | 109 | |
|
110 | 110 | for sname in section_names: |
|
111 | 111 | # Don't do a blind getattr as that would cause the config to |
|
112 | 112 | # dynamically create the section with name self.__class__.__name__. |
|
113 | 113 | if new._has_section(sname): |
|
114 | 114 | my_config = new[sname] |
|
115 | for k, v in traits.items(): | |
|
115 | for k, v in traits.iteritems(): | |
|
116 | 116 | # Don't allow traitlets with config=True to start with |
|
117 | 117 | # uppercase. Otherwise, they are confused with Config |
|
118 | 118 | # subsections. But, developers shouldn't have uppercase |
|
119 | 119 | # attributes anyway! (PEP 8) |
|
120 | 120 | if k[0].upper()==k[0] and not k.startswith('_'): |
|
121 | 121 | raise ConfigurableError('Configurable traitlets with ' |
|
122 | 122 | 'config=True must start with a lowercase so they are ' |
|
123 | 123 | 'not confused with Config subsections: %s.%s' % \ |
|
124 | 124 | (self.__class__.__name__, k)) |
|
125 | 125 | try: |
|
126 | 126 | # Here we grab the value from the config |
|
127 | 127 | # If k has the naming convention of a config |
|
128 | 128 | # section, it will be auto created. |
|
129 | 129 | config_value = my_config[k] |
|
130 | 130 | except KeyError: |
|
131 | 131 | pass |
|
132 | 132 | else: |
|
133 | 133 | # print "Setting %s.%s from %s.%s=%r" % \ |
|
134 | 134 | # (self.__class__.__name__,k,sname,k,config_value) |
|
135 | 135 | # We have to do a deepcopy here if we don't deepcopy the entire |
|
136 | 136 | # config object. If we don't, a mutable config_value will be |
|
137 | 137 | # shared by all instances, effectively making it a class attribute. |
|
138 | 138 | setattr(self, k, deepcopy(config_value)) |
|
139 | 139 |
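The long comments in _config_changed above explain why each config_value is deepcopied before being set: without the copy, a mutable value pulled from a shared Config would be aliased by every instance, effectively turning it into a class attribute. A small self-contained sketch of that hazard and of the fix; the Widget class here is illustrative, not IPython code:

    from copy import deepcopy

    config_value = [1, 2, 3]              # mutable value coming from a config

    class Widget(object):
        def __init__(self, value):
            self.items = deepcopy(value)  # drop deepcopy() and every widget
                                          # would share the same list

    a = Widget(config_value)
    b = Widget(config_value)
    a.items.append(4)
    assert b.items == [1, 2, 3]           # unaffected, thanks to the copy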
@@ -1,370 +1,373 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | 2 | # coding: utf-8 |
|
2 | 3 | """A simple configuration system. |
|
3 | 4 | |
|
4 | 5 | Authors |
|
5 | 6 | ------- |
|
6 | 7 | * Brian Granger |
|
7 | 8 | * Fernando Perez |
|
8 | 9 | """ |
|
9 | 10 | |
|
10 | 11 | #----------------------------------------------------------------------------- |
|
11 | 12 | # Copyright (C) 2008-2009 The IPython Development Team |
|
12 | 13 | # |
|
13 | 14 | # Distributed under the terms of the BSD License. The full license is in |
|
14 | 15 | # the file COPYING, distributed as part of this software. |
|
15 | 16 | #----------------------------------------------------------------------------- |
|
16 | 17 | |
|
17 | 18 | #----------------------------------------------------------------------------- |
|
18 | 19 | # Imports |
|
19 | 20 | #----------------------------------------------------------------------------- |
|
20 | 21 | |
|
21 | 22 | import __builtin__ |
|
22 | 23 | import os |
|
23 | 24 | import sys |
|
24 | 25 | |
|
25 | 26 | from IPython.external import argparse |
|
26 | 27 | from IPython.utils.path import filefind |
|
27 | 28 | |
|
28 | 29 | #----------------------------------------------------------------------------- |
|
29 | 30 | # Exceptions |
|
30 | 31 | #----------------------------------------------------------------------------- |
|
31 | 32 | |
|
32 | 33 | |
|
33 | 34 | class ConfigError(Exception): |
|
34 | 35 | pass |
|
35 | 36 | |
|
36 | 37 | |
|
37 | 38 | class ConfigLoaderError(ConfigError): |
|
38 | 39 | pass |
|
39 | 40 | |
|
40 | 41 | #----------------------------------------------------------------------------- |
|
41 | 42 | # Argparse fix |
|
42 | 43 | #----------------------------------------------------------------------------- |
|
43 | 44 | |
|
44 | 45 | # Unfortunately argparse by default prints help messages to stderr instead of |
|
45 | 46 | # stdout. This makes it annoying to capture long help screens at the command |
|
46 | 47 | # line, since one must know how to pipe stderr, which many users don't know how |
|
47 | 48 | # to do. So we override the print_help method with one that defaults to |
|
48 | 49 | # stdout and use our class instead. |
|
49 | 50 | |
|
50 | 51 | class ArgumentParser(argparse.ArgumentParser): |
|
51 | 52 | """Simple argparse subclass that prints help to stdout by default.""" |
|
52 | 53 | |
|
53 | 54 | def print_help(self, file=None): |
|
54 | 55 | if file is None: |
|
55 | 56 | file = sys.stdout |
|
56 | 57 | return super(ArgumentParser, self).print_help(file) |
|
57 | 58 | |
|
58 | 59 | print_help.__doc__ = argparse.ArgumentParser.print_help.__doc__ |
|
59 | 60 | |
|
60 | 61 | #----------------------------------------------------------------------------- |
|
61 | 62 | # Config class for holding config information |
|
62 | 63 | #----------------------------------------------------------------------------- |
|
63 | 64 | |
|
64 | 65 | |
|
65 | 66 | class Config(dict): |
|
66 | 67 | """An attribute based dict that can do smart merges.""" |
|
67 | 68 | |
|
68 | 69 | def __init__(self, *args, **kwds): |
|
69 | 70 | dict.__init__(self, *args, **kwds) |
|
70 | 71 | # This sets self.__dict__ = self, but it has to be done this way |
|
71 | 72 | # because we are also overriding __setattr__. |
|
72 | 73 | dict.__setattr__(self, '__dict__', self) |
|
73 | 74 | |
|
74 | 75 | def _merge(self, other): |
|
75 | 76 | to_update = {} |
|
76 | for k, v in other.items(): | |
|
77 | for k, v in other.iteritems(): | |
|
77 | 78 | if not self.has_key(k): |
|
78 | 79 | to_update[k] = v |
|
79 | 80 | else: # I have this key |
|
80 | 81 | if isinstance(v, Config): |
|
81 | 82 | # Recursively merge common sub Configs |
|
82 | 83 | self[k]._merge(v) |
|
83 | 84 | else: |
|
84 | 85 | # Plain updates for non-Configs |
|
85 | 86 | to_update[k] = v |
|
86 | 87 | |
|
87 | 88 | self.update(to_update) |
|
88 | 89 | |
|
89 | 90 | def _is_section_key(self, key): |
|
90 | 91 | if key[0].upper()==key[0] and not key.startswith('_'): |
|
91 | 92 | return True |
|
92 | 93 | else: |
|
93 | 94 | return False |
|
94 | 95 | |
|
95 | def has_key(self, key): | |
96 | def __contains__(self, key): | |
|
96 | 97 | if self._is_section_key(key): |
|
97 | 98 | return True |
|
98 | 99 | else: |
|
99 | return dict.has_key(self, key) | |
|
100 | return super(Config, self).__contains__(key) | |
|
101 | # .has_key is deprecated for dictionaries. | |
|
102 | has_key = __contains__ | |
|
100 | 103 | |
|
101 | 104 | def _has_section(self, key): |
|
102 | 105 | if self._is_section_key(key): |
|
103 | if dict.has_key(self, key): | |
|
106 | if super(Config, self).__contains__(key): | |
|
104 | 107 | return True |
|
105 | 108 | return False |
|
106 | 109 | |
|
107 | 110 | def copy(self): |
|
108 | 111 | return type(self)(dict.copy(self)) |
|
109 | 112 | |
|
110 | 113 | def __copy__(self): |
|
111 | 114 | return self.copy() |
|
112 | 115 | |
|
113 | 116 | def __deepcopy__(self, memo): |
|
114 | 117 | import copy |
|
115 | 118 | return type(self)(copy.deepcopy(self.items())) |
|
116 | 119 | |
|
117 | 120 | def __getitem__(self, key): |
|
118 | 121 | # Because we use this for an exec namespace, we need to delegate |
|
119 | 122 | # the lookup of names in __builtin__ to itself. This means |
|
120 | 123 | # that you can't have section or attribute names that are |
|
121 | 124 | # builtins. |
|
122 | 125 | try: |
|
123 | 126 | return getattr(__builtin__, key) |
|
124 | 127 | except AttributeError: |
|
125 | 128 | pass |
|
126 | 129 | if self._is_section_key(key): |
|
127 | 130 | try: |
|
128 | 131 | return dict.__getitem__(self, key) |
|
129 | 132 | except KeyError: |
|
130 | 133 | c = Config() |
|
131 | 134 | dict.__setitem__(self, key, c) |
|
132 | 135 | return c |
|
133 | 136 | else: |
|
134 | 137 | return dict.__getitem__(self, key) |
|
135 | 138 | |
|
136 | 139 | def __setitem__(self, key, value): |
|
137 | 140 | # Don't allow names in __builtin__ to be modified. |
|
138 | 141 | if hasattr(__builtin__, key): |
|
139 | 142 | raise ConfigError('Config variable names cannot have the same name ' |
|
140 | 143 | 'as a Python builtin: %s' % key) |
|
141 | 144 | if self._is_section_key(key): |
|
142 | 145 | if not isinstance(value, Config): |
|
143 | 146 | raise ValueError('values whose keys begin with an uppercase ' |
|
144 | 147 | 'char must be Config instances: %r, %r' % (key, value)) |
|
145 | 148 | else: |
|
146 | 149 | dict.__setitem__(self, key, value) |
|
147 | 150 | |
|
148 | 151 | def __getattr__(self, key): |
|
149 | 152 | try: |
|
150 | 153 | return self.__getitem__(key) |
|
151 | 154 | except KeyError, e: |
|
152 | 155 | raise AttributeError(e) |
|
153 | 156 | |
|
154 | 157 | def __setattr__(self, key, value): |
|
155 | 158 | try: |
|
156 | 159 | self.__setitem__(key, value) |
|
157 | 160 | except KeyError, e: |
|
158 | 161 | raise AttributeError(e) |
|
159 | 162 | |
|
160 | 163 | def __delattr__(self, key): |
|
161 | 164 | try: |
|
162 | 165 | dict.__delitem__(self, key) |
|
163 | 166 | except KeyError, e: |
|
164 | 167 | raise AttributeError(e) |
|
165 | 168 | |
|
166 | 169 | |
|
167 | 170 | #----------------------------------------------------------------------------- |
|
168 | 171 | # Config loading classes |
|
169 | 172 | #----------------------------------------------------------------------------- |
|
170 | 173 | |
|
171 | 174 | |
|
172 | 175 | class ConfigLoader(object): |
|
173 | 176 | """An object for loading configurations from just about anywhere. |
|
174 | 177 | |
|
175 | 178 | The resulting configuration is packaged as a :class:`Struct`. |
|
176 | 179 | |
|
177 | 180 | Notes |
|
178 | 181 | ----- |
|
179 | 182 | A :class:`ConfigLoader` does one thing: load a config from a source |
|
180 | 183 | (file, command line arguments) and returns the data as a :class:`Struct`. |
|
181 | 184 | There are lots of things that :class:`ConfigLoader` does not do. It does |
|
182 | 185 | not implement complex logic for finding config files. It does not handle |
|
183 | 186 | default values or merge multiple configs. These things need to be |
|
184 | 187 | handled elsewhere. |
|
185 | 188 | """ |
|
186 | 189 | |
|
187 | 190 | def __init__(self): |
|
188 | 191 | """A base class for config loaders. |
|
189 | 192 | |
|
190 | 193 | Examples |
|
191 | 194 | -------- |
|
192 | 195 | |
|
193 | 196 | >>> cl = ConfigLoader() |
|
194 | 197 | >>> config = cl.load_config() |
|
195 | 198 | >>> config |
|
196 | 199 | {} |
|
197 | 200 | """ |
|
198 | 201 | self.clear() |
|
199 | 202 | |
|
200 | 203 | def clear(self): |
|
201 | 204 | self.config = Config() |
|
202 | 205 | |
|
203 | 206 | def load_config(self): |
|
204 | 207 | """Load a config from somewhere, return a :class:`Config` instance. |
|
205 | 208 | |
|
206 | 209 | Usually, this will cause self.config to be set and then returned. |
|
207 | 210 | However, in most cases, :meth:`ConfigLoader.clear` should be called |
|
208 | 211 | to erase any previous state. |
|
209 | 212 | """ |
|
210 | 213 | self.clear() |
|
211 | 214 | return self.config |
|
212 | 215 | |
|
213 | 216 | |
|
214 | 217 | class FileConfigLoader(ConfigLoader): |
|
215 | 218 | """A base class for file based configurations. |
|
216 | 219 | |
|
217 | 220 | As we add more file based config loaders, the common logic should go |
|
218 | 221 | here. |
|
219 | 222 | """ |
|
220 | 223 | pass |
|
221 | 224 | |
|
222 | 225 | |
|
223 | 226 | class PyFileConfigLoader(FileConfigLoader): |
|
224 | 227 | """A config loader for pure python files. |
|
225 | 228 | |
|
226 | 229 | This calls execfile on a plain python file and looks for attributes |
|
227 | 230 | that are all caps. These attributes are added to the config Struct. |
|
228 | 231 | """ |
|
229 | 232 | |
|
230 | 233 | def __init__(self, filename, path=None): |
|
231 | 234 | """Build a config loader for a filename and path. |
|
232 | 235 | |
|
233 | 236 | Parameters |
|
234 | 237 | ---------- |
|
235 | 238 | filename : str |
|
236 | 239 | The file name of the config file. |
|
237 | 240 | path : str, list, tuple |
|
238 | 241 | The path to search for the config file on, or a sequence of |
|
239 | 242 | paths to try in order. |
|
240 | 243 | """ |
|
241 | 244 | super(PyFileConfigLoader, self).__init__() |
|
242 | 245 | self.filename = filename |
|
243 | 246 | self.path = path |
|
244 | 247 | self.full_filename = '' |
|
245 | 248 | self.data = None |
|
246 | 249 | |
|
247 | 250 | def load_config(self): |
|
248 | 251 | """Load the config from a file and return it as a Struct.""" |
|
249 | 252 | self.clear() |
|
250 | 253 | self._find_file() |
|
251 | 254 | self._read_file_as_dict() |
|
252 | 255 | self._convert_to_config() |
|
253 | 256 | return self.config |
|
254 | 257 | |
|
255 | 258 | def _find_file(self): |
|
256 | 259 | """Try to find the file by searching the paths.""" |
|
257 | 260 | self.full_filename = filefind(self.filename, self.path) |
|
258 | 261 | |
|
259 | 262 | def _read_file_as_dict(self): |
|
260 | 263 | """Load the config file into self.config, with recursive loading.""" |
|
261 | 264 | # This closure is made available in the namespace that is used |
|
262 | 265 | # to exec the config file. This allows users to call |
|
263 | 266 | # load_subconfig('myconfig.py') to load config files recursively. |
|
264 | 267 | # It needs to be a closure because it has references to self.path |
|
265 | 268 | # and self.config. The sub-config is loaded with the same path |
|
266 | 269 | # as the parent, but it uses an empty config which is then merged |
|
267 | 270 | # with the parents. |
|
268 | 271 | def load_subconfig(fname): |
|
269 | 272 | loader = PyFileConfigLoader(fname, self.path) |
|
270 | 273 | try: |
|
271 | 274 | sub_config = loader.load_config() |
|
272 | 275 | except IOError: |
|
273 | 276 | # Pass silently if the sub config is not there. This happens |
|
274 | 277 | # when a user is using a profile, but not the default config. |
|
275 | 278 | pass |
|
276 | 279 | else: |
|
277 | 280 | self.config._merge(sub_config) |
|
278 | 281 | |
|
279 | 282 | # Again, this needs to be a closure and should be used in config |
|
280 | 283 | # files to get the config being loaded. |
|
281 | 284 | def get_config(): |
|
282 | 285 | return self.config |
|
283 | 286 | |
|
284 | 287 | namespace = dict(load_subconfig=load_subconfig, get_config=get_config) |
|
285 | 288 | execfile(self.full_filename, namespace) |
|
286 | 289 | |
|
287 | 290 | def _convert_to_config(self): |
|
288 | 291 | if self.data is None: |
|
289 | 292 | ConfigLoaderError('self.data does not exist') |
|
290 | 293 | |
|
291 | 294 | |
|
292 | 295 | class CommandLineConfigLoader(ConfigLoader): |
|
293 | 296 | """A config loader for command line arguments. |
|
294 | 297 | |
|
295 | 298 | As we add more command line based loaders, the common logic should go |
|
296 | 299 | here. |
|
297 | 300 | """ |
|
298 | 301 | |
|
299 | 302 | |
|
300 | 303 | class ArgParseConfigLoader(CommandLineConfigLoader): |
|
301 | 304 | |
|
302 | 305 | def __init__(self, argv=None, *parser_args, **parser_kw): |
|
303 | 306 | """Create a config loader for use with argparse. |
|
304 | 307 | |
|
305 | 308 | Parameters |
|
306 | 309 | ---------- |
|
307 | 310 | |
|
308 | 311 | argv : optional, list |
|
309 | 312 | If given, used to read command-line arguments from, otherwise |
|
310 | 313 | sys.argv[1:] is used. |
|
311 | 314 | |
|
312 | 315 | parser_args : tuple |
|
313 | 316 | A tuple of positional arguments that will be passed to the |
|
314 | 317 | constructor of :class:`argparse.ArgumentParser`. |
|
315 | 318 | |
|
316 | 319 | parser_kw : dict |
|
317 | 320 | A dict of keyword arguments that will be passed to the |
|
318 | 321 | constructor of :class:`argparse.ArgumentParser`. |
|
319 | 322 | """ |
|
320 | 323 | super(CommandLineConfigLoader, self).__init__() |
|
321 | 324 | if argv == None: |
|
322 | 325 | argv = sys.argv[1:] |
|
323 | 326 | self.argv = argv |
|
324 | 327 | self.parser_args = parser_args |
|
325 | 328 | kwargs = dict(argument_default=argparse.SUPPRESS) |
|
326 | 329 | kwargs.update(parser_kw) |
|
327 | 330 | self.parser_kw = kwargs |
|
328 | 331 | |
|
329 | 332 | def load_config(self, args=None): |
|
330 | 333 | """Parse command line arguments and return as a Struct. |
|
331 | 334 | |
|
332 | 335 | Parameters |
|
333 | 336 | ---------- |
|
334 | 337 | |
|
335 | 338 | args : optional, list |
|
336 | 339 | If given, a list with the structure of sys.argv[1:] to parse |
|
337 | 340 | arguments from. If not given, the instance's self.argv attribute |
|
338 | 341 | (given at construction time) is used.""" |
|
339 | 342 | self.clear() |
|
340 | 343 | if args is None: |
|
341 | 344 | args = self.argv |
|
342 | 345 | self._create_parser() |
|
343 | 346 | self._parse_args(args) |
|
344 | 347 | self._convert_to_config() |
|
345 | 348 | return self.config |
|
346 | 349 | |
|
347 | 350 | def get_extra_args(self): |
|
348 | 351 | if hasattr(self, 'extra_args'): |
|
349 | 352 | return self.extra_args |
|
350 | 353 | else: |
|
351 | 354 | return [] |
|
352 | 355 | |
|
353 | 356 | def _create_parser(self): |
|
354 | 357 | self.parser = ArgumentParser(*self.parser_args, **self.parser_kw) |
|
355 | 358 | self._add_arguments() |
|
356 | 359 | |
|
357 | 360 | def _add_arguments(self): |
|
358 | 361 | raise NotImplementedError("subclasses must implement _add_arguments") |
|
359 | 362 | |
|
360 | 363 | def _parse_args(self, args): |
|
361 | 364 | """self.parser->self.parsed_data""" |
|
362 | 365 | self.parsed_data, self.extra_args = self.parser.parse_known_args(args) |
|
363 | 366 | |
|
364 | 367 | def _convert_to_config(self): |
|
365 | 368 | """self.parsed_data->self.config""" |
|
366 | for k, v in vars(self.parsed_data).items(): | |
|
369 | for k, v in vars(self.parsed_data).iteritems(): | |
|
367 | 370 | exec_str = 'self.config.' + k + '= v' |
|
368 | 371 | exec exec_str in locals(), globals() |
|
369 | 372 | |
|
370 | 373 |
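The main behavioural change in this hunk replaces Config.has_key() with __contains__ (keeping has_key as an alias, since dict.has_key is deprecated) and routes the plain-key lookup through super(). A condensed, standalone analogue of just that part of the class, with the builtins delegation and section auto-creation left out; MiniConfig is not part of the diff:

    class MiniConfig(dict):
        def _is_section_key(self, key):
            # Capitalised keys name sub-Config sections.
            return key[0].upper() == key[0] and not key.startswith('_')

        def __contains__(self, key):
            if self._is_section_key(key):
                return True
            return super(MiniConfig, self).__contains__(key)

        has_key = __contains__    # keep the old spelling working

    c = MiniConfig(foo=1)
    assert 'foo' in c and c.has_key('foo')
    assert 'Section' in c         # section keys always report as present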
@@ -1,346 +1,346 b'' | |||
|
1 | 1 | """Implementations for various useful completers. |
|
2 | 2 | |
|
3 | 3 | These are all loaded by default by IPython. |
|
4 | 4 | """ |
|
5 | 5 | #----------------------------------------------------------------------------- |
|
6 | 6 | # Copyright (C) 2010 The IPython Development Team. |
|
7 | 7 | # |
|
8 | 8 | # Distributed under the terms of the BSD License. |
|
9 | 9 | # |
|
10 | 10 | # The full license is in the file COPYING.txt, distributed with this software. |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | # Imports |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | from __future__ import print_function |
|
17 | 17 | |
|
18 | 18 | # Stdlib imports |
|
19 | 19 | import glob |
|
20 | 20 | import inspect |
|
21 | 21 | import os |
|
22 | 22 | import re |
|
23 | 23 | import shlex |
|
24 | 24 | import sys |
|
25 | 25 | |
|
26 | 26 | # Third-party imports |
|
27 | 27 | from time import time |
|
28 | 28 | from zipimport import zipimporter |
|
29 | 29 | |
|
30 | 30 | # Our own imports |
|
31 | 31 | from IPython.core.completer import expand_user, compress_user |
|
32 | 32 | from IPython.core.error import TryNext |
|
33 | 33 | |
|
34 | 34 | # FIXME: this should be pulled in with the right call via the component system |
|
35 | 35 | from IPython.core.ipapi import get as get_ipython |
|
36 | 36 | |
|
37 | 37 | #----------------------------------------------------------------------------- |
|
38 | 38 | # Globals and constants |
|
39 | 39 | #----------------------------------------------------------------------------- |
|
40 | 40 | |
|
41 | 41 | # Time in seconds after which the rootmodules will be stored permanently in the |
|
42 | 42 | # ipython ip.db database (kept in the user's .ipython dir). |
|
43 | 43 | TIMEOUT_STORAGE = 2 |
|
44 | 44 | |
|
45 | 45 | # Time in seconds after which we give up |
|
46 | 46 | TIMEOUT_GIVEUP = 20 |
|
47 | 47 | |
|
48 | 48 | # Regular expression for the python import statement |
|
49 | 49 | import_re = re.compile(r'.*(\.so|\.py[cod]?)$') |
|
50 | 50 | |
|
51 | 51 | # RE for the ipython %run command (python + ipython scripts) |
|
52 | 52 | magic_run_re = re.compile(r'.*(\.ipy|\.py[w]?)$') |
|
53 | 53 | |
|
54 | 54 | #----------------------------------------------------------------------------- |
|
55 | 55 | # Local utilities |
|
56 | 56 | #----------------------------------------------------------------------------- |
|
57 | 57 | |
|
58 | 58 | def shlex_split(x): |
|
59 | 59 | """Helper function to split lines into segments. |
|
60 | 60 | """ |
|
61 | 61 | # shlex.split raises an exception if there is a syntax error in sh syntax |
|
62 | 62 | # for example if no closing " is found. This function keeps dropping the |
|
63 | 63 | # last character of the line until shlex.split does not raise |
|
64 | 64 | # an exception. It adds end of the line to the result of shlex.split |
|
65 | 65 | # |
|
66 | 66 | # Example: |
|
67 | 67 | # %run "c:/python -> ['%run','"c:/python'] |
|
68 | 68 | |
|
69 | 69 | # shlex.split has unicode bugs, so encode first to str |
|
70 | 70 | if isinstance(x, unicode): |
|
71 | 71 | x = x.encode(sys.stdin.encoding) |
|
72 | 72 | |
|
73 | 73 | endofline = [] |
|
74 | 74 | while x != '': |
|
75 | 75 | try: |
|
76 | 76 | comps = shlex.split(x) |
|
77 | 77 | if len(endofline) >= 1: |
|
78 | 78 | comps.append(''.join(endofline)) |
|
79 | 79 | return comps |
|
80 | 80 | |
|
81 | 81 | except ValueError: |
|
82 | 82 | endofline = [x[-1:]]+endofline |
|
83 | 83 | x = x[:-1] |
|
84 | 84 | |
|
85 | 85 | return [''.join(endofline)] |
|
86 | 86 | |
|
87 | 87 | def module_list(path): |
|
88 | 88 | """ |
|
89 | 89 | Return the list containing the names of the modules available in the given |
|
90 | 90 | folder. |
|
91 | 91 | """ |
|
92 | 92 | |
|
93 | 93 | if os.path.isdir(path): |
|
94 | 94 | folder_list = os.listdir(path) |
|
95 | 95 | elif path.endswith('.egg'): |
|
96 | 96 | try: |
|
97 | 97 | folder_list = [f for f in zipimporter(path)._files] |
|
98 | 98 | except: |
|
99 | 99 | folder_list = [] |
|
100 | 100 | else: |
|
101 | 101 | folder_list = [] |
|
102 | 102 | |
|
103 | 103 | if not folder_list: |
|
104 | 104 | return [] |
|
105 | 105 | |
|
106 | 106 | # A few local constants to be used in loops below |
|
107 | 107 | isfile = os.path.isfile |
|
108 | 108 | pjoin = os.path.join |
|
109 | 109 | basename = os.path.basename |
|
110 | 110 | |
|
111 | 111 | # Now find actual path matches for packages or modules |
|
112 | 112 | folder_list = [p for p in folder_list |
|
113 | 113 | if isfile(pjoin(path, p,'__init__.py')) |
|
114 | 114 | or import_re.match(p) ] |
|
115 | 115 | |
|
116 | 116 | return [basename(p).split('.')[0] for p in folder_list] |
|
117 | 117 | |
|
118 | 118 | def get_root_modules(): |
|
119 | 119 | """ |
|
120 | 120 | Returns a list containing the names of all the modules available in the |
|
121 | 121 | folders of the pythonpath. |
|
122 | 122 | """ |
|
123 | 123 | ip = get_ipython() |
|
124 | 124 | |
|
125 | 125 | if 'rootmodules' in ip.db: |
|
126 | 126 | return ip.db['rootmodules'] |
|
127 | 127 | |
|
128 | 128 | t = time() |
|
129 | 129 | store = False |
|
130 | 130 | modules = list(sys.builtin_module_names) |
|
131 | 131 | for path in sys.path: |
|
132 | 132 | modules += module_list(path) |
|
133 | 133 | if time() - t >= TIMEOUT_STORAGE and not store: |
|
134 | 134 | store = True |
|
135 | 135 | print("\nCaching the list of root modules, please wait!") |
|
136 | 136 | print("(This will only be done once - type '%rehashx' to " |
|
137 | 137 | "reset cache!)\n") |
|
138 | 138 | sys.stdout.flush() |
|
139 | 139 | if time() - t > TIMEOUT_GIVEUP: |
|
140 | 140 | print("This is taking too long, we give up.\n") |
|
141 | 141 | ip.db['rootmodules'] = [] |
|
142 | 142 | return [] |
|
143 | 143 | |
|
144 | 144 | modules = set(modules) |
|
145 | 145 | if '__init__' in modules: |
|
146 | 146 | modules.remove('__init__') |
|
147 | 147 | modules = list(modules) |
|
148 | 148 | if store: |
|
149 | 149 | ip.db['rootmodules'] = modules |
|
150 | 150 | return modules |
|
151 | 151 | |
|
152 | 152 | |
|
153 | 153 | def is_importable(module, attr, only_modules): |
|
154 | 154 | if only_modules: |
|
155 | 155 | return inspect.ismodule(getattr(module, attr)) |
|
156 | 156 | else: |
|
157 | 157 | return not(attr[:2] == '__' and attr[-2:] == '__') |
|
158 | 158 | |
|
159 | 159 | |
|
160 | 160 | def try_import(mod, only_modules=False): |
|
161 | 161 | try: |
|
162 | 162 | m = __import__(mod) |
|
163 | 163 | except: |
|
164 | 164 | return [] |
|
165 | 165 | mods = mod.split('.') |
|
166 | 166 | for module in mods[1:]: |
|
167 | 167 | m = getattr(m, module) |
|
168 | 168 | |
|
169 | 169 | m_is_init = hasattr(m, '__file__') and '__init__' in m.__file__ |
|
170 | 170 | |
|
171 | 171 | completions = [] |
|
172 | 172 | if (not hasattr(m, '__file__')) or (not only_modules) or m_is_init: |
|
173 | 173 | completions.extend( [attr for attr in dir(m) if |
|
174 | 174 | is_importable(m, attr, only_modules)]) |
|
175 | 175 | |
|
176 | 176 | completions.extend(getattr(m, '__all__', [])) |
|
177 | 177 | if m_is_init: |
|
178 | 178 | completions.extend(module_list(os.path.dirname(m.__file__))) |
|
179 | 179 | completions = set(completions) |
|
180 | 180 | if '__init__' in completions: |
|
181 | 181 | completions.remove('__init__') |
|
182 | 182 | return list(completions) |
|
183 | 183 | |
|
184 | 184 | |
|
185 | 185 | #----------------------------------------------------------------------------- |
|
186 | 186 | # Completion-related functions. |
|
187 | 187 | #----------------------------------------------------------------------------- |
|
188 | 188 | |
|
189 | 189 | def quick_completer(cmd, completions): |
|
190 | 190 | """ Easily create a trivial completer for a command. |
|
191 | 191 | |
|
192 | 192 | Takes either a list of completions, or all completions in string (that will |
|
193 | 193 | be split on whitespace). |
|
194 | 194 | |
|
195 | 195 | Example:: |
|
196 | 196 | |
|
197 | 197 | [d:\ipython]|1> import ipy_completers |
|
198 | 198 | [d:\ipython]|2> ipy_completers.quick_completer('foo', ['bar','baz']) |
|
199 | 199 | [d:\ipython]|3> foo b<TAB> |
|
200 | 200 | bar baz |
|
201 | 201 | [d:\ipython]|3> foo ba |
|
202 | 202 | """ |
|
203 | 203 | |
|
204 | 204 | if isinstance(completions, basestring): |
|
205 | 205 | completions = completions.split() |
|
206 | 206 | |
|
207 | 207 | def do_complete(self, event): |
|
208 | 208 | return completions |
|
209 | 209 | |
|
210 | 210 | get_ipython().set_hook('complete_command',do_complete, str_key = cmd) |
|
211 | 211 | |
|
212 | 212 | |
|
213 | 213 | def module_completion(line): |
|
214 | 214 | """ |
|
215 | 215 | Returns a list containing the completion possibilities for an import line. |
|
216 | 216 | |
|
217 | 217 | The line looks like this : |
|
218 | 218 | 'import xml.d' |
|
219 | 219 | 'from xml.dom import' |
|
220 | 220 | """ |
|
221 | 221 | |
|
222 | 222 | words = line.split(' ') |
|
223 | 223 | nwords = len(words) |
|
224 | 224 | |
|
225 | 225 | # from whatever <tab> -> 'import ' |
|
226 | 226 | if nwords == 3 and words[0] == 'from': |
|
227 | 227 | return ['import '] |
|
228 | 228 | |
|
229 | 229 | # 'from xy<tab>' or 'import xy<tab>' |
|
230 | 230 | if nwords < 3 and (words[0] in ['import','from']) : |
|
231 | 231 | if nwords == 1: |
|
232 | 232 | return get_root_modules() |
|
233 | 233 | mod = words[1].split('.') |
|
234 | 234 | if len(mod) < 2: |
|
235 | 235 | return get_root_modules() |
|
236 | 236 | completion_list = try_import('.'.join(mod[:-1]), True) |
|
237 | 237 | return ['.'.join(mod[:-1] + [el]) for el in completion_list] |
|
238 | 238 | |
|
239 | 239 | # 'from xyz import abc<tab>' |
|
240 | 240 | if nwords >= 3 and words[0] == 'from': |
|
241 | 241 | mod = words[1] |
|
242 | 242 | return try_import(mod) |
|
243 | 243 | |
|
244 | 244 | #----------------------------------------------------------------------------- |
|
245 | 245 | # Completers |
|
246 | 246 | #----------------------------------------------------------------------------- |
|
247 | 247 | # These all have the func(self, event) signature to be used as custom |
|
248 | 248 | # completers |
|
249 | 249 | |
|
250 | 250 | def module_completer(self,event): |
|
251 | 251 | """Give completions after user has typed 'import ...' or 'from ...'""" |
|
252 | 252 | |
|
253 | 253 | # This works in all versions of python. While 2.5 has |
|
254 | 254 | # pkgutil.walk_packages(), that particular routine is fairly dangerous, |
|
255 | 255 | # since it imports *EVERYTHING* on sys.path. That is: a) very slow b) full |
|
256 | 256 | # of possibly problematic side effects. |
|
257 | 257 | # This searches the folders in sys.path for available modules. |
|
258 | 258 | |
|
259 | 259 | return module_completion(event.line) |
|
260 | 260 | |
|
261 | 261 | # FIXME: there's a lot of logic common to the run, cd and builtin file |
|
262 | 262 | # completers, that is currently reimplemented in each. |
|
263 | 263 | |
|
264 | 264 | def magic_run_completer(self, event): |
|
265 | 265 | """Complete files that end in .py or .ipy for the %run command. |
|
266 | 266 | """ |
|
267 | 267 | comps = shlex_split(event.line) |
|
268 | 268 | relpath = (len(comps) > 1 and comps[-1] or '').strip("'\"") |
|
269 | 269 | |
|
270 | 270 | #print("\nev=", event) # dbg |
|
271 | 271 | #print("rp=", relpath) # dbg |
|
272 | 272 | #print('comps=', comps) # dbg |
|
273 | 273 | |
|
274 | 274 | lglob = glob.glob |
|
275 | 275 | isdir = os.path.isdir |
|
276 | 276 | relpath, tilde_expand, tilde_val = expand_user(relpath) |
|
277 | 277 | |
|
278 | 278 | dirs = [f.replace('\\','/') + "/" for f in lglob(relpath+'*') if isdir(f)] |
|
279 | 279 | |
|
280 | 280 | # Find if the user has already typed the first filename, after which we |
|
281 | 281 | # should complete on all files, since after the first one other files may |
|
282 | 282 | # be arguments to the input script. |
|
283 | 283 | |
|
284 | 284 | if filter(magic_run_re.match, comps): |
|
285 | 285 | pys = [f.replace('\\','/') for f in lglob('*')] |
|
286 | 286 | else: |
|
287 | 287 | pys = [f.replace('\\','/') |
|
288 | 288 | for f in lglob(relpath+'*.py') + lglob(relpath+'*.ipy') + |
|
289 | 289 | lglob(relpath + '*.pyw')] |
|
290 | 290 | #print('run comp:', dirs+pys) # dbg |
|
291 | 291 | return [compress_user(p, tilde_expand, tilde_val) for p in dirs+pys] |
|
292 | 292 | |
|
293 | 293 | |
|
294 | 294 | def cd_completer(self, event): |
|
295 | 295 | """Completer function for cd, which only returns directories.""" |
|
296 | 296 | ip = get_ipython() |
|
297 | 297 | relpath = event.symbol |
|
298 | 298 | |
|
299 | 299 | #print(event) # dbg |
|
300 | 300 | if event.line.endswith('-b') or ' -b ' in event.line: |
|
301 | 301 | # return only bookmark completions |
|
302 | 302 | bkms = self.db.get('bookmarks', None) |
|
303 | 303 | if bkms: |
|
304 | 304 | return bkms.keys() |
|
305 | 305 | else: |
|
306 | 306 | return [] |
|
307 | 307 | |
|
308 | 308 | if event.symbol == '-': |
|
309 | 309 | width_dh = str(len(str(len(ip.user_ns['_dh']) + 1))) |
|
310 | 310 | # jump in directory history by number |
|
311 | 311 | fmt = '-%0' + width_dh +'d [%s]' |
|
312 | 312 | ents = [ fmt % (i,s) for i,s in enumerate(ip.user_ns['_dh'])] |
|
313 | 313 | if len(ents) > 1: |
|
314 | 314 | return ents |
|
315 | 315 | return [] |
|
316 | 316 | |
|
317 | 317 | if event.symbol.startswith('--'): |
|
318 | 318 | return ["--" + os.path.basename(d) for d in ip.user_ns['_dh']] |
|
319 | 319 | |
|
320 | 320 | # Expand ~ in path and normalize directory separators. |
|
321 | 321 | relpath, tilde_expand, tilde_val = expand_user(relpath) |
|
322 | 322 | relpath = relpath.replace('\\','/') |
|
323 | 323 | |
|
324 | 324 | found = [] |
|
325 | 325 | for d in [f.replace('\\','/') + '/' for f in glob.glob(relpath+'*') |
|
326 | 326 | if os.path.isdir(f)]: |
|
327 | 327 | if ' ' in d: |
|
328 | 328 | # we don't want to deal with any of that, complex code |
|
329 | 329 | # for this is elsewhere |
|
330 | 330 | raise TryNext |
|
331 | 331 | |
|
332 | 332 | found.append(d) |
|
333 | 333 | |
|
334 | 334 | if not found: |
|
335 | 335 | if os.path.isdir(relpath): |
|
336 | 336 | return [compress_user(relpath, tilde_expand, tilde_val)] |
|
337 | 337 | |
|
338 | 338 | # if no completions so far, try bookmarks |
|
339 | bks = self.db.get('bookmarks',{}).keys() | |
|
339 | bks = self.db.get('bookmarks',{}).iterkeys() | |
|
340 | 340 | bkmatches = [s for s in bks if s.startswith(event.symbol)] |
|
341 | 341 | if bkmatches: |
|
342 | 342 | return bkmatches |
|
343 | 343 | |
|
344 | 344 | raise TryNext |
|
345 | 345 | |
|
346 | 346 | return [compress_user(p, tilde_expand, tilde_val) for p in found] |
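module_completion() above resolves 'import x.y' and 'from x import' lines by combining get_root_modules() with try_import(). A hedged usage sketch, assuming the IPython tree from this diff is importable under Python 2; the completions returned depend entirely on the local installation:

    from IPython.core.completerlib import module_completion

    # Complete a dotted module name, as in the docstring example 'import xml.d'.
    print module_completion('import xml.d')
    # Complete names importable from a package (trailing space: nothing typed yet
    # after 'import', so the whole attribute list is offered).
    print module_completion('from xml.dom import ')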
@@ -1,283 +1,283 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ History related magics and functionality """ |
|
3 | 3 | |
|
4 | 4 | # Stdlib imports |
|
5 | 5 | import fnmatch |
|
6 | 6 | import os |
|
7 | 7 | |
|
8 | 8 | import IPython.utils.io |
|
9 | 9 | from IPython.utils.io import ask_yes_no |
|
10 | 10 | from IPython.utils.warn import warn |
|
11 | 11 | from IPython.core import ipapi |
|
12 | 12 | |
|
13 | 13 | def magic_history(self, parameter_s = ''): |
|
14 | 14 | """Print input history (_i<n> variables), with most recent last. |
|
15 | 15 | |
|
16 | 16 | %history -> print at most 40 inputs (some may be multi-line)\\ |
|
17 | 17 | %history n -> print at most n inputs\\ |
|
18 | 18 | %history n1 n2 -> print inputs between n1 and n2 (n2 not included)\\ |
|
19 | 19 | |
|
20 | 20 | By default, input history is printed without line numbers so it can be |
|
21 | 21 | directly pasted into an editor. |
|
22 | 22 | |
|
23 | 23 | With -n, each input's number <n> is shown, and is accessible as the |
|
24 | 24 | automatically generated variable _i<n> as well as In[<n>]. Multi-line |
|
25 | 25 | statements are printed starting at a new line for easy copy/paste. |
|
26 | 26 | |
|
27 | 27 | Options: |
|
28 | 28 | |
|
29 | 29 | -n: print line numbers for each input. |
|
30 | 30 | This feature is only available if numbered prompts are in use. |
|
31 | 31 | |
|
32 | 32 | -o: also print outputs for each input. |
|
33 | 33 | |
|
34 | 34 | -p: print classic '>>>' python prompts before each input. This is useful |
|
35 | 35 | for making documentation, and in conjunction with -o, for producing |
|
36 | 36 | doctest-ready output. |
|
37 | 37 | |
|
38 | 38 | -r: (default) print the 'raw' history, i.e. the actual commands you typed. |
|
39 | 39 | |
|
40 | 40 | -t: print the 'translated' history, as IPython understands it. IPython |
|
41 | 41 | filters your input and converts it all into valid Python source before |
|
42 | 42 | executing it (things like magics or aliases are turned into function |
|
43 | 43 | calls, for example). With this option, you'll see the native history |
|
44 | 44 | instead of the user-entered version: '%cd /' will be seen as |
|
45 | 45 | 'get_ipython().magic("%cd /")' instead of '%cd /'. |
|
46 | 46 | |
|
47 | 47 | -g: treat the arg as a pattern to grep for in (full) history. |
|
48 | 48 | This includes the "shadow history" (almost all commands ever written). |
|
49 | 49 | Use '%hist -g' to show full shadow history (may be very long). |
|
50 | 50 | In shadow history, every index number starts with 0. |
|
51 | 51 | |
|
52 | 52 | -f FILENAME: instead of printing the output to the screen, redirect it to |
|
53 | 53 | the given file. The file is always overwritten, though IPython asks for |
|
54 | 54 | confirmation first if it already exists. |
|
55 | 55 | """ |
|
56 | 56 | |
|
57 | 57 | if not self.shell.displayhook.do_full_cache: |
|
58 | 58 | print 'This feature is only available if numbered prompts are in use.' |
|
59 | 59 | return |
|
60 | 60 | opts,args = self.parse_options(parameter_s,'gnoptsrf:',mode='list') |
|
61 | 61 | |
|
62 | 62 | # Check if output to specific file was requested. |
|
63 | 63 | try: |
|
64 | 64 | outfname = opts['f'] |
|
65 | 65 | except KeyError: |
|
66 | 66 | outfile = IPython.utils.io.Term.cout # default |
|
67 | 67 | # We don't want to close stdout at the end! |
|
68 | 68 | close_at_end = False |
|
69 | 69 | else: |
|
70 | 70 | if os.path.exists(outfname): |
|
71 | 71 | if not ask_yes_no("File %r exists. Overwrite?" % outfname): |
|
72 | 72 | print 'Aborting.' |
|
73 | 73 | return |
|
74 | 74 | |
|
75 | 75 | outfile = open(outfname,'w') |
|
76 | 76 | close_at_end = True |
|
77 | 77 | |
|
78 | 78 | if 't' in opts: |
|
79 | 79 | input_hist = self.shell.input_hist |
|
80 | 80 | elif 'r' in opts: |
|
81 | 81 | input_hist = self.shell.input_hist_raw |
|
82 | 82 | else: |
|
83 | 83 | # Raw history is the default |
|
84 | 84 | input_hist = self.shell.input_hist_raw |
|
85 | 85 | |
|
86 | 86 | default_length = 40 |
|
87 | 87 | pattern = None |
|
88 | 88 | if 'g' in opts: |
|
89 | 89 | init = 1 |
|
90 | 90 | final = len(input_hist) |
|
91 | 91 | parts = parameter_s.split(None, 1) |
|
92 | 92 | if len(parts) == 1: |
|
93 | 93 | parts += '*' |
|
94 | 94 | head, pattern = parts |
|
95 | 95 | pattern = "*" + pattern + "*" |
|
96 | 96 | elif len(args) == 0: |
|
97 | 97 | final = len(input_hist)-1 |
|
98 | 98 | init = max(1,final-default_length) |
|
99 | 99 | elif len(args) == 1: |
|
100 | 100 | final = len(input_hist) |
|
101 | 101 | init = max(1, final-int(args[0])) |
|
102 | 102 | elif len(args) == 2: |
|
103 | 103 | init, final = map(int, args) |
|
104 | 104 | else: |
|
105 | 105 | warn('%hist takes 0, 1 or 2 arguments separated by spaces.') |
|
106 | 106 | print >> IPython.utils.io.Term.cout, self.magic_hist.__doc__ |
|
107 | 107 | return |
|
108 | 108 | |
|
109 | 109 | width = len(str(final)) |
|
110 | 110 | line_sep = ['','\n'] |
|
111 | 111 | print_nums = 'n' in opts |
|
112 | 112 | print_outputs = 'o' in opts |
|
113 | 113 | pyprompts = 'p' in opts |
|
114 | 114 | |
|
115 | 115 | found = False |
|
116 | 116 | if pattern is not None: |
|
117 | 117 | sh = self.shell.shadowhist.all() |
|
118 | 118 | for idx, s in sh: |
|
119 | 119 | if fnmatch.fnmatch(s, pattern): |
|
120 | 120 | print >> outfile, "0%d: %s" %(idx, s.expandtabs(4)) |
|
121 | 121 | found = True |
|
122 | 122 | |
|
123 | 123 | if found: |
|
124 | 124 | print >> outfile, "===" |
|
125 | 125 | print >> outfile, \ |
|
126 | 126 | "shadow history ends, fetch by %rep <number> (must start with 0)" |
|
127 | 127 | print >> outfile, "=== start of normal history ===" |
|
128 | 128 | |
|
129 | 129 | for in_num in range(init, final): |
|
130 | 130 | # Print user history with tabs expanded to 4 spaces. The GUI clients |
|
131 | 131 | # use hard tabs for easier usability in auto-indented code, but we want |
|
132 | 132 | # to produce PEP-8 compliant history for safe pasting into an editor. |
|
133 | 133 | inline = input_hist[in_num].expandtabs(4) |
|
134 | 134 | |
|
135 | 135 | if pattern is not None and not fnmatch.fnmatch(inline, pattern): |
|
136 | 136 | continue |
|
137 | 137 | |
|
138 | 138 | multiline = int(inline.count('\n') > 1) |
|
139 | 139 | if print_nums: |
|
140 | 140 | print >> outfile, \ |
|
141 | 141 | '%s:%s' % (str(in_num).ljust(width), line_sep[multiline]), |
|
142 | 142 | if pyprompts: |
|
143 | 143 | print >> outfile, '>>>', |
|
144 | 144 | if multiline: |
|
145 | 145 | lines = inline.splitlines() |
|
146 | 146 | print >> outfile, '\n... '.join(lines) |
|
147 | 147 | print >> outfile, '... ' |
|
148 | 148 | else: |
|
149 | 149 | print >> outfile, inline, |
|
150 | 150 | else: |
|
151 | 151 | print >> outfile, inline, |
|
152 | 152 | if print_outputs: |
|
153 | 153 | output = self.shell.output_hist.get(in_num) |
|
154 | 154 | if output is not None: |
|
155 | 155 | print >> outfile, repr(output) |
|
156 | 156 | |
|
157 | 157 | if close_at_end: |
|
158 | 158 | outfile.close() |
|
159 | 159 | |
|
160 | 160 | |
|
161 | 161 | def magic_hist(self, parameter_s=''): |
|
162 | 162 | """Alternate name for %history.""" |
|
163 | 163 | return self.magic_history(parameter_s) |
|
164 | 164 | |
|
165 | 165 | |
|
166 | 166 | def rep_f(self, arg): |
|
167 | 167 | r""" Repeat a command, or get command to input line for editing |
|
168 | 168 | |
|
169 | 169 | - %rep (no arguments): |
|
170 | 170 | |
|
171 | 171 | Place a string version of last computation result (stored in the special '_' |
|
172 | 172 | variable) to the next input prompt. Allows you to create elaborate command |
|
173 | 173 | lines without using copy-paste:: |
|
174 | 174 | |
|
175 | 175 | $ l = ["hei", "vaan"] |
|
176 | 176 | $ "".join(l) |
|
177 | 177 | ==> heivaan |
|
178 | 178 | $ %rep |
|
179 | 179 | $ heivaan_ <== cursor blinking |
|
180 | 180 | |
|
181 | 181 | %rep 45 |
|
182 | 182 | |
|
183 | 183 | Place history line 45 to next input prompt. Use %hist to find out the |
|
184 | 184 | number. |
|
185 | 185 | |
|
186 | 186 | %rep 1-4 6-7 3 |
|
187 | 187 | |
|
188 | 188 | Repeat the specified lines immediately. Input slice syntax is the same as |
|
189 | 189 | in %macro and %save. |
|
190 | 190 | |
|
191 | 191 | %rep foo |
|
192 | 192 | |
|
193 | 193 | Place the most recent line that has the substring "foo" to next input. |
|
194 | 194 | (e.g. 'svn ci -m foobar'). |
|
195 | 195 | """ |
|
196 | 196 | |
|
197 | 197 | opts,args = self.parse_options(arg,'',mode='list') |
|
198 | 198 | if not args: |
|
199 | 199 | self.set_next_input(str(self.shell.user_ns["_"])) |
|
200 | 200 | return |
|
201 | 201 | |
|
202 | 202 | if len(args) == 1 and not '-' in args[0]: |
|
203 | 203 | arg = args[0] |
|
204 | 204 | if len(arg) > 1 and arg.startswith('0'): |
|
205 | 205 | # get from shadow hist |
|
206 | 206 | num = int(arg[1:]) |
|
207 | 207 | line = self.shell.shadowhist.get(num) |
|
208 | 208 | self.set_next_input(str(line)) |
|
209 | 209 | return |
|
210 | 210 | try: |
|
211 | 211 | num = int(args[0]) |
|
212 | 212 | self.set_next_input(str(self.shell.input_hist_raw[num]).rstrip()) |
|
213 | 213 | return |
|
214 | 214 | except ValueError: |
|
215 | 215 | pass |
|
216 | 216 | |
|
217 | 217 | for h in reversed(self.shell.input_hist_raw): |
|
218 | 218 | if 'rep' in h: |
|
219 | 219 | continue |
|
220 | 220 | if fnmatch.fnmatch(h,'*' + arg + '*'): |
|
221 | 221 | self.set_next_input(str(h).rstrip()) |
|
222 | 222 | return |
|
223 | 223 | |
|
224 | 224 | try: |
|
225 | 225 | lines = self.extract_input_slices(args, True) |
|
226 | 226 | print "lines",lines |
|
227 | 227 | self.runlines(lines) |
|
228 | 228 | except ValueError: |
|
229 | 229 | print "Not found in recent history:", args |
|
230 | 230 | |
|
231 | 231 | |
|
232 | 232 | _sentinel = object() |
|
233 | 233 | |
|
234 | 234 | class ShadowHist(object): |
|
235 | 235 | def __init__(self, db): |
|
236 | 236 | # cmd => idx mapping |
|
237 | 237 | self.curidx = 0 |
|
238 | 238 | self.db = db |
|
239 | 239 | self.disabled = False |
|
240 | 240 | |
|
241 | 241 | def inc_idx(self): |
|
242 | 242 | idx = self.db.get('shadowhist_idx', 1) |
|
243 | 243 | self.db['shadowhist_idx'] = idx + 1 |
|
244 | 244 | return idx |
|
245 | 245 | |
|
246 | 246 | def add(self, ent): |
|
247 | 247 | if self.disabled: |
|
248 | 248 | return |
|
249 | 249 | try: |
|
250 | 250 | old = self.db.hget('shadowhist', ent, _sentinel) |
|
251 | 251 | if old is not _sentinel: |
|
252 | 252 | return |
|
253 | 253 | newidx = self.inc_idx() |
|
254 | 254 | #print "new",newidx # dbg |
|
255 | 255 | self.db.hset('shadowhist',ent, newidx) |
|
256 | 256 | except: |
|
257 | 257 | ipapi.get().showtraceback() |
|
258 | 258 | print "WARNING: disabling shadow history" |
|
259 | 259 | self.disabled = True |
|
260 | 260 | |
|
261 | 261 | def all(self): |
|
262 | 262 | d = self.db.hdict('shadowhist') |
|
263 | items = [(i,s) for (s,i) in d.items()] | |
|
263 | items = [(i,s) for (s,i) in d.iteritems()] | |
|
264 | 264 | items.sort() |
|
265 | 265 | return items |
|
266 | 266 | |
|
267 | 267 | def get(self, idx): |
|
268 | 268 | all = self.all() |
|
269 | 269 | |
|
270 | 270 | for k, v in all: |
|
271 | 271 | #print k,v |
|
272 | 272 | if k == idx: |
|
273 | 273 | return v |
|
274 | 274 | |
|
275 | 275 | |
|
276 | 276 | def init_ipython(ip): |
|
277 | 277 | ip.define_magic("rep",rep_f) |
|
278 | 278 | ip.define_magic("hist",magic_hist) |
|
279 | 279 | ip.define_magic("history",magic_history) |
|
280 | 280 | |
|
281 | 281 | # XXX - ipy_completers are in quarantine, need to be updated to new apis |
|
282 | 282 | #import ipy_completers |
|
283 | 283 | #ipy_completers.quick_completer('%hist' ,'-g -t -r -n') |
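The only change in this hunk is in ShadowHist.all(): the shadow-history dict, which maps each command string to its index, is now walked with iteritems() while still being inverted into sorted (index, command) pairs. A standalone Python 2 analogue with illustrative data:

    # command -> index, as stored in the shadow history database
    shadow = {'ls': 1, 'cd /tmp': 2, 'print 1+1': 3}

    items = [(i, s) for (s, i) in shadow.iteritems()]
    items.sort()
    print items     # [(1, 'ls'), (2, 'cd /tmp'), (3, 'print 1+1')]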
@@ -1,2587 +1,2584 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Main IPython class.""" |
|
3 | 3 | |
|
4 | 4 | #----------------------------------------------------------------------------- |
|
5 | 5 | # Copyright (C) 2001 Janko Hauser <jhauser@zscout.de> |
|
6 | 6 | # Copyright (C) 2001-2007 Fernando Perez. <fperez@colorado.edu> |
|
7 | 7 | # Copyright (C) 2008-2010 The IPython Development Team |
|
8 | 8 | # |
|
9 | 9 | # Distributed under the terms of the BSD License. The full license is in |
|
10 | 10 | # the file COPYING, distributed as part of this software. |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | # Imports |
|
15 | 15 | #----------------------------------------------------------------------------- |
|
16 | 16 | |
|
17 | 17 | from __future__ import with_statement |
|
18 | 18 | from __future__ import absolute_import |
|
19 | 19 | |
|
20 | 20 | import __builtin__ |
|
21 | 21 | import __future__ |
|
22 | 22 | import abc |
|
23 | 23 | import atexit |
|
24 | 24 | import codeop |
|
25 | import exceptions | |
|
26 | import new | |
|
27 | 25 | import os |
|
28 | 26 | import re |
|
29 | 27 | import string |
|
30 | 28 | import sys |
|
31 | 29 | import tempfile |
|
30 | import types | |
|
32 | 31 | from contextlib import nested |
|
33 | 32 | |
|
34 | 33 | from IPython.config.configurable import Configurable |
|
35 | 34 | from IPython.core import debugger, oinspect |
|
36 | 35 | from IPython.core import history as ipcorehist |
|
37 | 36 | from IPython.core import page |
|
38 | 37 | from IPython.core import prefilter |
|
39 | 38 | from IPython.core import shadowns |
|
40 | 39 | from IPython.core import ultratb |
|
41 | 40 | from IPython.core.alias import AliasManager |
|
42 | 41 | from IPython.core.builtin_trap import BuiltinTrap |
|
43 | 42 | from IPython.core.display_trap import DisplayTrap |
|
44 | 43 | from IPython.core.displayhook import DisplayHook |
|
45 | 44 | from IPython.core.error import TryNext, UsageError |
|
46 | 45 | from IPython.core.extensions import ExtensionManager |
|
47 | 46 | from IPython.core.fakemodule import FakeModule, init_fakemod_dict |
|
48 | 47 | from IPython.core.inputlist import InputList |
|
49 | 48 | from IPython.core.inputsplitter import IPythonInputSplitter |
|
50 | 49 | from IPython.core.logger import Logger |
|
51 | 50 | from IPython.core.magic import Magic |
|
52 | 51 | from IPython.core.payload import PayloadManager |
|
53 | 52 | from IPython.core.plugin import PluginManager |
|
54 | 53 | from IPython.core.prefilter import PrefilterManager, ESC_MAGIC |
|
55 | 54 | from IPython.external.Itpl import ItplNS |
|
56 | 55 | from IPython.utils import PyColorize |
|
57 | 56 | from IPython.utils import io |
|
58 | 57 | from IPython.utils import pickleshare |
|
59 | 58 | from IPython.utils.doctestreload import doctest_reload |
|
60 | 59 | from IPython.utils.io import ask_yes_no, rprint |
|
61 | 60 | from IPython.utils.ipstruct import Struct |
|
62 | 61 | from IPython.utils.path import get_home_dir, get_ipython_dir, HomeDirError |
|
63 | 62 | from IPython.utils.process import system, getoutput |
|
64 | 63 | from IPython.utils.strdispatch import StrDispatch |
|
65 | 64 | from IPython.utils.syspathcontext import prepended_to_syspath |
|
66 | 65 | from IPython.utils.text import num_ini_spaces, format_screen, LSString, SList |
|
67 | 66 | from IPython.utils.traitlets import (Int, Str, CBool, CaselessStrEnum, Enum, |
|
68 | 67 | List, Unicode, Instance, Type) |
|
69 | 68 | from IPython.utils.warn import warn, error, fatal |
|
70 | 69 | import IPython.core.hooks |
|
71 | 70 | |
|
72 | 71 | #----------------------------------------------------------------------------- |
|
73 | 72 | # Globals |
|
74 | 73 | #----------------------------------------------------------------------------- |
|
75 | 74 | |
|
76 | 75 | # compiled regexps for autoindent management |
|
77 | 76 | dedent_re = re.compile(r'^\s+raise|^\s+return|^\s+pass') |
|
78 | 77 | |
|
79 | 78 | #----------------------------------------------------------------------------- |
|
80 | 79 | # Utilities |
|
81 | 80 | #----------------------------------------------------------------------------- |
|
82 | 81 | |
|
83 | 82 | # store the builtin raw_input globally, and use this always, in case user code |
|
84 | 83 | # overwrites it (like wx.py.PyShell does) |
|
85 | 84 | raw_input_original = raw_input |
|
86 | 85 | |
|
87 | 86 | def softspace(file, newvalue): |
|
88 | 87 | """Copied from code.py, to remove the dependency""" |
|
89 | 88 | |
|
90 | 89 | oldvalue = 0 |
|
91 | 90 | try: |
|
92 | 91 | oldvalue = file.softspace |
|
93 | 92 | except AttributeError: |
|
94 | 93 | pass |
|
95 | 94 | try: |
|
96 | 95 | file.softspace = newvalue |
|
97 | 96 | except (AttributeError, TypeError): |
|
98 | 97 | # "attribute-less object" or "read-only attributes" |
|
99 | 98 | pass |
|
100 | 99 | return oldvalue |
|
101 | 100 | |
|
102 | 101 | |
|
103 | 102 | def no_op(*a, **kw): pass |
|
104 | 103 | |
|
105 | class SpaceInInput(exceptions.Exception): pass | 

104 | class SpaceInInput(Exception): pass | 
|
106 | 105 | |
|
107 | 106 | class Bunch: pass |
|
108 | 107 | |
|
109 | 108 | |
|
110 | 109 | def get_default_colors(): |
|
111 | 110 | if sys.platform=='darwin': |
|
112 | 111 | return "LightBG" |
|
113 | 112 | elif os.name=='nt': |
|
114 | 113 | return 'Linux' |
|
115 | 114 | else: |
|
116 | 115 | return 'Linux' |
|
117 | 116 | |
|
118 | 117 | |
|
119 | 118 | class SeparateStr(Str): |
|
120 | 119 | """A Str subclass to validate separate_in, separate_out, etc. |
|
121 | 120 | |
|
122 | 121 | This is a Str based trait that converts '0'->'' and '\\n'->'\n'. |
|
123 | 122 | """ |
|
124 | 123 | |
|
125 | 124 | def validate(self, obj, value): |
|
126 | 125 | if value == '0': value = '' |
|
127 | 126 | value = value.replace('\\n','\n') |
|
128 | 127 | return super(SeparateStr, self).validate(obj, value) |
|
129 | 128 | |
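The conversion rule that SeparateStr.validate applies can be tried standalone; a minimal sketch as a plain function (illustrative only, not the real traitlets machinery):

# Minimal sketch of the SeparateStr conversion rule: '0' means "no separator"
# and a literal backslash-n from a config file becomes a real newline.
def convert_separator(value):
    if value == '0':
        value = ''
    return value.replace('\\n', '\n')

assert convert_separator('0') == ''
assert convert_separator('\\n') == '\n'
assert convert_separator('x') == 'x'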
|
130 | 129 | class MultipleInstanceError(Exception): |
|
131 | 130 | pass |
|
132 | 131 | |
|
133 | 132 | |
|
134 | 133 | #----------------------------------------------------------------------------- |
|
135 | 134 | # Main IPython class |
|
136 | 135 | #----------------------------------------------------------------------------- |
|
137 | 136 | |
|
138 | 137 | |
|
139 | 138 | class InteractiveShell(Configurable, Magic): |
|
140 | 139 | """An enhanced, interactive shell for Python.""" |
|
141 | 140 | |
|
142 | 141 | _instance = None |
|
143 | 142 | autocall = Enum((0,1,2), default_value=1, config=True) |
|
144 | 143 | # TODO: remove all autoindent logic and put into frontends. |
|
145 | 144 | # We can't do this yet because even runlines uses the autoindent. |
|
146 | 145 | autoindent = CBool(True, config=True) |
|
147 | 146 | automagic = CBool(True, config=True) |
|
148 | 147 | cache_size = Int(1000, config=True) |
|
149 | 148 | color_info = CBool(True, config=True) |
|
150 | 149 | colors = CaselessStrEnum(('NoColor','LightBG','Linux'), |
|
151 | 150 | default_value=get_default_colors(), config=True) |
|
152 | 151 | debug = CBool(False, config=True) |
|
153 | 152 | deep_reload = CBool(False, config=True) |
|
154 | 153 | displayhook_class = Type(DisplayHook) |
|
155 | 154 | exit_now = CBool(False) |
|
156 | 155 | filename = Str("<ipython console>") |
|
157 | 156 | ipython_dir= Unicode('', config=True) # Set to get_ipython_dir() in __init__ |
|
158 | 157 | |
|
159 | 158 | # Input splitter, to split entire cells of input into either individual |
|
160 | 159 | # interactive statements or whole blocks. |
|
161 | 160 | input_splitter = Instance('IPython.core.inputsplitter.IPythonInputSplitter', |
|
162 | 161 | (), {}) |
|
163 | 162 | logstart = CBool(False, config=True) |
|
164 | 163 | logfile = Str('', config=True) |
|
165 | 164 | logappend = Str('', config=True) |
|
166 | 165 | object_info_string_level = Enum((0,1,2), default_value=0, |
|
167 | 166 | config=True) |
|
168 | 167 | pdb = CBool(False, config=True) |
|
169 | 168 | |
|
170 | 169 | pprint = CBool(True, config=True) |
|
171 | 170 | profile = Str('', config=True) |
|
172 | 171 | prompt_in1 = Str('In [\\#]: ', config=True) |
|
173 | 172 | prompt_in2 = Str(' .\\D.: ', config=True) |
|
174 | 173 | prompt_out = Str('Out[\\#]: ', config=True) |
|
175 | 174 | prompts_pad_left = CBool(True, config=True) |
|
176 | 175 | quiet = CBool(False, config=True) |
|
177 | 176 | |
|
178 | 177 | # The readline stuff will eventually be moved to the terminal subclass |
|
179 | 178 | # but for now, we can't do that as readline is welded in everywhere. |
|
180 | 179 | readline_use = CBool(True, config=True) |
|
181 | 180 | readline_merge_completions = CBool(True, config=True) |
|
182 | 181 | readline_omit__names = Enum((0,1,2), default_value=0, config=True) |
|
183 | 182 | readline_remove_delims = Str('-/~', config=True) |
|
184 | 183 | readline_parse_and_bind = List([ |
|
185 | 184 | 'tab: complete', |
|
186 | 185 | '"\C-l": clear-screen', |
|
187 | 186 | 'set show-all-if-ambiguous on', |
|
188 | 187 | '"\C-o": tab-insert', |
|
189 | 188 | '"\M-i": " "', |
|
190 | 189 | '"\M-o": "\d\d\d\d"', |
|
191 | 190 | '"\M-I": "\d\d\d\d"', |
|
192 | 191 | '"\C-r": reverse-search-history', |
|
193 | 192 | '"\C-s": forward-search-history', |
|
194 | 193 | '"\C-p": history-search-backward', |
|
195 | 194 | '"\C-n": history-search-forward', |
|
196 | 195 | '"\e[A": history-search-backward', |
|
197 | 196 | '"\e[B": history-search-forward', |
|
198 | 197 | '"\C-k": kill-line', |
|
199 | 198 | '"\C-u": unix-line-discard', |
|
200 | 199 | ], allow_none=False, config=True) |
|
201 | 200 | |
|
202 | 201 | # TODO: this part of prompt management should be moved to the frontends. |
|
203 | 202 | # Use custom TraitTypes that convert '0'->'' and '\\n'->'\n' |
|
204 | 203 | separate_in = SeparateStr('\n', config=True) |
|
205 | 204 | separate_out = SeparateStr('', config=True) |
|
206 | 205 | separate_out2 = SeparateStr('', config=True) |
|
207 | 206 | wildcards_case_sensitive = CBool(True, config=True) |
|
208 | 207 | xmode = CaselessStrEnum(('Context','Plain', 'Verbose'), |
|
209 | 208 | default_value='Context', config=True) |
|
210 | 209 | |
|
211 | 210 | # Subcomponents of InteractiveShell |
|
212 | 211 | alias_manager = Instance('IPython.core.alias.AliasManager') |
|
213 | 212 | prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager') |
|
214 | 213 | builtin_trap = Instance('IPython.core.builtin_trap.BuiltinTrap') |
|
215 | 214 | display_trap = Instance('IPython.core.display_trap.DisplayTrap') |
|
216 | 215 | extension_manager = Instance('IPython.core.extensions.ExtensionManager') |
|
217 | 216 | plugin_manager = Instance('IPython.core.plugin.PluginManager') |
|
218 | 217 | payload_manager = Instance('IPython.core.payload.PayloadManager') |
|
219 | 218 | |
|
220 | 219 | # Private interface |
|
221 | 220 | _post_execute = set() |
|
222 | 221 | |
|
223 | 222 | def __init__(self, config=None, ipython_dir=None, |
|
224 | 223 | user_ns=None, user_global_ns=None, |
|
225 | 224 | custom_exceptions=((), None)): |
|
226 | 225 | |
|
227 | 226 | # This is where traits with a config_key argument are updated |
|
228 | 227 | # from the values on config. |
|
229 | 228 | super(InteractiveShell, self).__init__(config=config) |
|
230 | 229 | |
|
231 | 230 | # These are relatively independent and stateless |
|
232 | 231 | self.init_ipython_dir(ipython_dir) |
|
233 | 232 | self.init_instance_attrs() |
|
234 | 233 | self.init_environment() |
|
235 | 234 | |
|
236 | 235 | # Create namespaces (user_ns, user_global_ns, etc.) |
|
237 | 236 | self.init_create_namespaces(user_ns, user_global_ns) |
|
238 | 237 | # This has to be done after init_create_namespaces because it uses |
|
239 | 238 | # something in self.user_ns, but before init_sys_modules, which |
|
240 | 239 | # is the first thing to modify sys. |
|
241 | 240 | # TODO: When we override sys.stdout and sys.stderr before this class |
|
242 | 241 | # is created, we are saving the overridden ones here. Not sure if this |
|
243 | 242 | # is what we want to do. |
|
244 | 243 | self.save_sys_module_state() |
|
245 | 244 | self.init_sys_modules() |
|
246 | 245 | |
|
247 | 246 | self.init_history() |
|
248 | 247 | self.init_encoding() |
|
249 | 248 | self.init_prefilter() |
|
250 | 249 | |
|
251 | 250 | Magic.__init__(self, self) |
|
252 | 251 | |
|
253 | 252 | self.init_syntax_highlighting() |
|
254 | 253 | self.init_hooks() |
|
255 | 254 | self.init_pushd_popd_magic() |
|
256 | 255 | # self.init_traceback_handlers used to be here, but we moved it below
|
257 | 256 | # because it and init_io have to come after init_readline. |
|
258 | 257 | self.init_user_ns() |
|
259 | 258 | self.init_logger() |
|
260 | 259 | self.init_alias() |
|
261 | 260 | self.init_builtins() |
|
262 | 261 | |
|
263 | 262 | # pre_config_initialization |
|
264 | 263 | self.init_shadow_hist() |
|
265 | 264 | |
|
266 | 265 | # The next section should contain everything that was in ipmaker. |
|
267 | 266 | self.init_logstart() |
|
268 | 267 | |
|
269 | 268 | # The following was in post_config_initialization |
|
270 | 269 | self.init_inspector() |
|
271 | 270 | # init_readline() must come before init_io(), because init_io uses |
|
272 | 271 | # readline related things. |
|
273 | 272 | self.init_readline() |
|
274 | 273 | # init_completer must come after init_readline, because it needs to |
|
275 | 274 | # know whether readline is present or not system-wide to configure the |
|
276 | 275 | # completers, since the completion machinery can now operate |
|
277 | 276 | # independently of readline (e.g. over the network) |
|
278 | 277 | self.init_completer() |
|
279 | 278 | # TODO: init_io() needs to happen before init_traceback handlers |
|
280 | 279 | # because the traceback handlers hardcode the stdout/stderr streams. |
|
281 | 280 | # This logic is in debugger.Pdb and should eventually be changed.
|
282 | 281 | self.init_io() |
|
283 | 282 | self.init_traceback_handlers(custom_exceptions) |
|
284 | 283 | self.init_prompts() |
|
285 | 284 | self.init_displayhook() |
|
286 | 285 | self.init_reload_doctest() |
|
287 | 286 | self.init_magics() |
|
288 | 287 | self.init_pdb() |
|
289 | 288 | self.init_extension_manager() |
|
290 | 289 | self.init_plugin_manager() |
|
291 | 290 | self.init_payload() |
|
292 | 291 | self.hooks.late_startup_hook() |
|
293 | 292 | atexit.register(self.atexit_operations) |
|
294 | 293 | |
|
295 | 294 | @classmethod |
|
296 | 295 | def instance(cls, *args, **kwargs): |
|
297 | 296 | """Returns a global InteractiveShell instance.""" |
|
298 | 297 | if cls._instance is None: |
|
299 | 298 | inst = cls(*args, **kwargs) |
|
300 | 299 | # Now make sure that the instance will also be returned by |
|
301 | 300 | # the subclasses instance attribute. |
|
302 | 301 | for subclass in cls.mro(): |
|
303 | 302 | if issubclass(cls, subclass) and \ |
|
304 | 303 | issubclass(subclass, InteractiveShell): |
|
305 | 304 | subclass._instance = inst |
|
306 | 305 | else: |
|
307 | 306 | break |
|
308 | 307 | if isinstance(cls._instance, cls): |
|
309 | 308 | return cls._instance |
|
310 | 309 | else: |
|
311 | 310 | raise MultipleInstanceError( |
|
312 | 311 | 'Multiple incompatible subclass instances of ' |
|
313 | 312 | 'InteractiveShell are being created.' |
|
314 | 313 | ) |
|
315 | 314 | |
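The single-instance logic above can be illustrated with a reduced, standalone sketch (simplified: the instance is stored on every class in the MRO via a filter rather than the break-based walk used above; the class names are invented for illustration):

# Reduced sketch of the shared-singleton pattern behind instance(): the
# created object is stored on the base class and its subclasses, so repeated
# calls return the same object, or raise if the stored type conflicts.
class SingletonBase(object):
    _instance = None

    @classmethod
    def instance(cls, *args, **kwargs):
        if cls._instance is None:
            inst = cls(*args, **kwargs)
            for klass in cls.mro():
                if issubclass(klass, SingletonBase):
                    klass._instance = inst
        if isinstance(cls._instance, cls):
            return cls._instance
        raise RuntimeError('incompatible subclass instances exist')

class MyShell(SingletonBase):
    pass

assert MyShell.instance() is MyShell.instance()
assert SingletonBase._instance is MyShell.instance()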
|
316 | 315 | @classmethod |
|
317 | 316 | def initialized(cls): |
|
318 | 317 | return hasattr(cls, "_instance") |
|
319 | 318 | |
|
320 | 319 | def get_ipython(self): |
|
321 | 320 | """Return the currently running IPython instance.""" |
|
322 | 321 | return self |
|
323 | 322 | |
|
324 | 323 | #------------------------------------------------------------------------- |
|
325 | 324 | # Trait changed handlers |
|
326 | 325 | #------------------------------------------------------------------------- |
|
327 | 326 | |
|
328 | 327 | def _ipython_dir_changed(self, name, new): |
|
329 | 328 | if not os.path.isdir(new): |
|
330 | 329 | os.makedirs(new, mode = 0777) |
|
331 | 330 | |
|
332 | 331 | def set_autoindent(self,value=None): |
|
333 | 332 | """Set the autoindent flag, checking for readline support. |
|
334 | 333 | |
|
335 | 334 | If called with no arguments, it acts as a toggle.""" |
|
336 | 335 | |
|
337 | 336 | if not self.has_readline: |
|
338 | 337 | if os.name == 'posix': |
|
339 | 338 | warn("The auto-indent feature requires the readline library") |
|
340 | 339 | self.autoindent = 0 |
|
341 | 340 | return |
|
342 | 341 | if value is None: |
|
343 | 342 | self.autoindent = not self.autoindent |
|
344 | 343 | else: |
|
345 | 344 | self.autoindent = value |
|
346 | 345 | |
|
347 | 346 | #------------------------------------------------------------------------- |
|
348 | 347 | # init_* methods called by __init__ |
|
349 | 348 | #------------------------------------------------------------------------- |
|
350 | 349 | |
|
351 | 350 | def init_ipython_dir(self, ipython_dir): |
|
352 | 351 | if ipython_dir is not None: |
|
353 | 352 | self.ipython_dir = ipython_dir |
|
354 | 353 | self.config.Global.ipython_dir = self.ipython_dir |
|
355 | 354 | return |
|
356 | 355 | |
|
357 | 356 | if hasattr(self.config.Global, 'ipython_dir'): |
|
358 | 357 | self.ipython_dir = self.config.Global.ipython_dir |
|
359 | 358 | else: |
|
360 | 359 | self.ipython_dir = get_ipython_dir() |
|
361 | 360 | |
|
362 | 361 | # All children can just read this |
|
363 | 362 | self.config.Global.ipython_dir = self.ipython_dir |
|
364 | 363 | |
|
365 | 364 | def init_instance_attrs(self): |
|
366 | 365 | self.more = False |
|
367 | 366 | |
|
368 | 367 | # command compiler |
|
369 | 368 | self.compile = codeop.CommandCompiler() |
|
370 | 369 | |
|
371 | 370 | # User input buffer |
|
372 | 371 | self.buffer = [] |
|
373 | 372 | |
|
374 | 373 | # Make an empty namespace, which extension writers can rely on both |
|
375 | 374 | # existing and NEVER being used by ipython itself. This gives them a |
|
376 | 375 | # convenient location for storing additional information and state |
|
377 | 376 | # their extensions may require, without fear of collisions with other |
|
378 | 377 | # ipython names that may develop later. |
|
379 | 378 | self.meta = Struct() |
|
380 | 379 | |
|
381 | 380 | # Object variable to store code object waiting execution. This is |
|
382 | 381 | # used mainly by the multithreaded shells, but it can come in handy in |
|
383 | 382 | # other situations. No need to use a Queue here, since it's a single |
|
384 | 383 | # item which gets cleared once run. |
|
385 | 384 | self.code_to_run = None |
|
386 | 385 | |
|
387 | 386 | # Temporary files used for various purposes. Deleted at exit. |
|
388 | 387 | self.tempfiles = [] |
|
389 | 388 | |
|
390 | 389 | # Keep track of readline usage (later set by init_readline) |
|
391 | 390 | self.has_readline = False |
|
392 | 391 | |
|
393 | 392 | # keep track of where we started running (mainly for crash post-mortem) |
|
394 | 393 | # This is not being used anywhere currently. |
|
395 | 394 | self.starting_dir = os.getcwd() |
|
396 | 395 | |
|
397 | 396 | # Indentation management |
|
398 | 397 | self.indent_current_nsp = 0 |
|
399 | 398 | |
|
400 | 399 | def init_environment(self): |
|
401 | 400 | """Any changes we need to make to the user's environment.""" |
|
402 | 401 | pass |
|
403 | 402 | |
|
404 | 403 | def init_encoding(self): |
|
405 | 404 | # Get system encoding at startup time. Certain terminals (like Emacs |
|
406 | 405 | # under Win32) have it set to None, and we need to have a known valid
|
407 | 406 | # encoding to use in the raw_input() method |
|
408 | 407 | try: |
|
409 | 408 | self.stdin_encoding = sys.stdin.encoding or 'ascii' |
|
410 | 409 | except AttributeError: |
|
411 | 410 | self.stdin_encoding = 'ascii' |
|
412 | 411 | |
|
413 | 412 | def init_syntax_highlighting(self): |
|
414 | 413 | # Python source parser/formatter for syntax highlighting |
|
415 | 414 | pyformat = PyColorize.Parser().format |
|
416 | 415 | self.pycolorize = lambda src: pyformat(src,'str',self.colors) |
|
417 | 416 | |
|
418 | 417 | def init_pushd_popd_magic(self): |
|
419 | 418 | # for pushd/popd management |
|
420 | 419 | try: |
|
421 | 420 | self.home_dir = get_home_dir() |
|
422 | 421 | except HomeDirError, msg: |
|
423 | 422 | fatal(msg) |
|
424 | 423 | |
|
425 | 424 | self.dir_stack = [] |
|
426 | 425 | |
|
427 | 426 | def init_logger(self): |
|
428 | 427 | self.logger = Logger(self, logfname='ipython_log.py', logmode='rotate') |
|
429 | 428 | # local shortcut, this is used a LOT |
|
430 | 429 | self.log = self.logger.log |
|
431 | 430 | |
|
432 | 431 | def init_logstart(self): |
|
433 | 432 | if self.logappend: |
|
434 | 433 | self.magic_logstart(self.logappend + ' append') |
|
435 | 434 | elif self.logfile: |
|
436 | 435 | self.magic_logstart(self.logfile) |
|
437 | 436 | elif self.logstart: |
|
438 | 437 | self.magic_logstart() |
|
439 | 438 | |
|
440 | 439 | def init_builtins(self): |
|
441 | 440 | self.builtin_trap = BuiltinTrap(shell=self) |
|
442 | 441 | |
|
443 | 442 | def init_inspector(self): |
|
444 | 443 | # Object inspector |
|
445 | 444 | self.inspector = oinspect.Inspector(oinspect.InspectColors, |
|
446 | 445 | PyColorize.ANSICodeColors, |
|
447 | 446 | 'NoColor', |
|
448 | 447 | self.object_info_string_level) |
|
449 | 448 | |
|
450 | 449 | def init_io(self): |
|
451 | 450 | # This will just use sys.stdout and sys.stderr. If you want to |
|
452 | 451 | # override sys.stdout and sys.stderr themselves, you need to do that |
|
453 | 452 | # *before* instantiating this class, because Term holds onto |
|
454 | 453 | # references to the underlying streams. |
|
455 | 454 | if sys.platform == 'win32' and self.has_readline: |
|
456 | 455 | Term = io.IOTerm(cout=self.readline._outputfile, |
|
457 | 456 | cerr=self.readline._outputfile) |
|
458 | 457 | else: |
|
459 | 458 | Term = io.IOTerm() |
|
460 | 459 | io.Term = Term |
|
461 | 460 | |
|
462 | 461 | def init_prompts(self): |
|
463 | 462 | # TODO: This is a pass for now because the prompts are managed inside |
|
464 | 463 | # the DisplayHook. Once there is a separate prompt manager, this |
|
465 | 464 | # will initialize that object and all prompt related information. |
|
466 | 465 | pass |
|
467 | 466 | |
|
468 | 467 | def init_displayhook(self): |
|
469 | 468 | # Initialize displayhook, set in/out prompts and printing system |
|
470 | 469 | self.displayhook = self.displayhook_class( |
|
471 | 470 | shell=self, |
|
472 | 471 | cache_size=self.cache_size, |
|
473 | 472 | input_sep = self.separate_in, |
|
474 | 473 | output_sep = self.separate_out, |
|
475 | 474 | output_sep2 = self.separate_out2, |
|
476 | 475 | ps1 = self.prompt_in1, |
|
477 | 476 | ps2 = self.prompt_in2, |
|
478 | 477 | ps_out = self.prompt_out, |
|
479 | 478 | pad_left = self.prompts_pad_left |
|
480 | 479 | ) |
|
481 | 480 | # This is a context manager that installs/removes the displayhook at
|
482 | 481 | # the appropriate time. |
|
483 | 482 | self.display_trap = DisplayTrap(hook=self.displayhook) |
|
484 | 483 | |
|
485 | 484 | def init_reload_doctest(self): |
|
486 | 485 | # Do a proper resetting of doctest, including the necessary displayhook |
|
487 | 486 | # monkeypatching |
|
488 | 487 | try: |
|
489 | 488 | doctest_reload() |
|
490 | 489 | except ImportError: |
|
491 | 490 | warn("doctest module does not exist.") |
|
492 | 491 | |
|
493 | 492 | #------------------------------------------------------------------------- |
|
494 | 493 | # Things related to injections into the sys module |
|
495 | 494 | #------------------------------------------------------------------------- |
|
496 | 495 | |
|
497 | 496 | def save_sys_module_state(self): |
|
498 | 497 | """Save the state of hooks in the sys module. |
|
499 | 498 | |
|
500 | 499 | This has to be called after self.user_ns is created. |
|
501 | 500 | """ |
|
502 | 501 | self._orig_sys_module_state = {} |
|
503 | 502 | self._orig_sys_module_state['stdin'] = sys.stdin |
|
504 | 503 | self._orig_sys_module_state['stdout'] = sys.stdout |
|
505 | 504 | self._orig_sys_module_state['stderr'] = sys.stderr |
|
506 | 505 | self._orig_sys_module_state['excepthook'] = sys.excepthook |
|
507 | 506 | try: |
|
508 | 507 | self._orig_sys_modules_main_name = self.user_ns['__name__'] |
|
509 | 508 | except KeyError: |
|
510 | 509 | pass |
|
511 | 510 | |
|
512 | 511 | def restore_sys_module_state(self): |
|
513 | 512 | """Restore the state of the sys module.""" |
|
514 | 513 | try: |
|
515 | for k, v in self._orig_sys_module_state.items(): | |
|
514 | for k, v in self._orig_sys_module_state.iteritems(): | |
|
516 | 515 | setattr(sys, k, v) |
|
517 | 516 | except AttributeError: |
|
518 | 517 | pass |
|
519 | 518 | # Reset what was done in self.init_sys_modules
|
520 | 519 | try: |
|
521 | 520 | sys.modules[self.user_ns['__name__']] = self._orig_sys_modules_main_name |
|
522 | 521 | except (AttributeError, KeyError): |
|
523 | 522 | pass |
|
524 | 523 | |
|
525 | 524 | #------------------------------------------------------------------------- |
|
526 | 525 | # Things related to hooks |
|
527 | 526 | #------------------------------------------------------------------------- |
|
528 | 527 | |
|
529 | 528 | def init_hooks(self): |
|
530 | 529 | # hooks holds pointers used for user-side customizations |
|
531 | 530 | self.hooks = Struct() |
|
532 | 531 | |
|
533 | 532 | self.strdispatchers = {} |
|
534 | 533 | |
|
535 | 534 | # Set all default hooks, defined in the IPython.hooks module. |
|
536 | 535 | hooks = IPython.core.hooks |
|
537 | 536 | for hook_name in hooks.__all__: |
|
538 | 537 | # default hooks have priority 100, i.e. low; user hooks should have |
|
539 | 538 | # 0-100 priority |
|
540 | 539 | self.set_hook(hook_name,getattr(hooks,hook_name), 100) |
|
541 | 540 | |
|
542 | 541 | def set_hook(self,name,hook, priority = 50, str_key = None, re_key = None): |
|
543 | 542 | """set_hook(name,hook) -> sets an internal IPython hook. |
|
544 | 543 | |
|
545 | 544 | IPython exposes some of its internal API as user-modifiable hooks. By |
|
546 | 545 | adding your function to one of these hooks, you can modify IPython's |
|
547 | 546 | behavior to call at runtime your own routines.""" |
|
548 | 547 | |
|
549 | 548 | # At some point in the future, this should validate the hook before it |
|
550 | 549 | # accepts it. Probably at least check that the hook takes the number |
|
551 | 550 | # of args it's supposed to. |
|
552 | 551 | |
|
553 | f = new.instancemethod(hook,self,self.__class__) | 

552 | f = types.MethodType(hook, self) | 
|
554 | 553 | |
|
555 | 554 | # check if the hook is for strdispatcher first |
|
556 | 555 | if str_key is not None: |
|
557 | 556 | sdp = self.strdispatchers.get(name, StrDispatch()) |
|
558 | 557 | sdp.add_s(str_key, f, priority ) |
|
559 | 558 | self.strdispatchers[name] = sdp |
|
560 | 559 | return |
|
561 | 560 | if re_key is not None: |
|
562 | 561 | sdp = self.strdispatchers.get(name, StrDispatch()) |
|
563 | 562 | sdp.add_re(re.compile(re_key), f, priority ) |
|
564 | 563 | self.strdispatchers[name] = sdp |
|
565 | 564 | return |
|
566 | 565 | |
|
567 | 566 | dp = getattr(self.hooks, name, None) |
|
568 | 567 | if name not in IPython.core.hooks.__all__: |
|
569 | 568 | print "Warning! Hook '%s' is not one of %s" % \ |
|
570 | 569 | (name, IPython.core.hooks.__all__ ) |
|
571 | 570 | if not dp: |
|
572 | 571 | dp = IPython.core.hooks.CommandChainDispatcher() |
|
573 | 572 | |
|
574 | 573 | try: |
|
575 | 574 | dp.add(f,priority) |
|
576 | 575 | except AttributeError: |
|
577 | 576 | # it was not commandchain, plain old func - replace |
|
578 | 577 | dp = f |
|
579 | 578 | |
|
580 | 579 | setattr(self.hooks,name, dp) |
|
581 | 580 | |
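A hedged usage sketch for set_hook (the 'editor' hook name and its (shell, filename, linenum) signature, get_ipython(), and vim are assumptions for illustration, not taken from this diff):

# Hypothetical example: install a custom 'editor' hook at priority 50, ahead
# of the defaults registered at priority 100 by init_hooks() above.
import subprocess

def vim_editor_hook(shell, filename, linenum=None):
    # set_hook binds the function to the shell, so `shell` is the instance.
    subprocess.call(['vim', '+%d' % (linenum or 1), filename])

# Inside a running interactive session (not runnable standalone):
#   ip = get_ipython()
#   ip.set_hook('editor', vim_editor_hook, priority=50)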
|
582 | 581 | def register_post_execute(self, func): |
|
583 | 582 | """Register a function for calling after code execution. |
|
584 | 583 | """ |
|
585 | 584 | if not callable(func): |
|
586 | 585 | raise ValueError('argument %s must be callable' % func) |
|
587 | 586 | self._post_execute.add(func) |
|
588 | 587 | |
|
589 | 588 | #------------------------------------------------------------------------- |
|
590 | 589 | # Things related to the "main" module |
|
591 | 590 | #------------------------------------------------------------------------- |
|
592 | 591 | |
|
593 | 592 | def new_main_mod(self,ns=None): |
|
594 | 593 | """Return a new 'main' module object for user code execution. |
|
595 | 594 | """ |
|
596 | 595 | main_mod = self._user_main_module |
|
597 | 596 | init_fakemod_dict(main_mod,ns) |
|
598 | 597 | return main_mod |
|
599 | 598 | |
|
600 | 599 | def cache_main_mod(self,ns,fname): |
|
601 | 600 | """Cache a main module's namespace. |
|
602 | 601 | |
|
603 | 602 | When scripts are executed via %run, we must keep a reference to the |
|
604 | 603 | namespace of their __main__ module (a FakeModule instance) around so |
|
605 | 604 | that Python doesn't clear it, rendering objects defined therein |
|
606 | 605 | useless. |
|
607 | 606 | |
|
608 | 607 | This method keeps said reference in a private dict, keyed by the |
|
609 | 608 | absolute path of the module object (which corresponds to the script |
|
610 | 609 | path). This way, for multiple executions of the same script we only |
|
611 | 610 | keep one copy of the namespace (the last one), thus preventing memory |
|
612 | 611 | leaks from old references while allowing the objects from the last |
|
613 | 612 | execution to be accessible. |
|
614 | 613 | |
|
615 | 614 | Note: we can not allow the actual FakeModule instances to be deleted, |
|
616 | 615 | because of how Python tears down modules (it hard-sets all their |
|
617 | 616 | references to None without regard for reference counts). This method |
|
618 | 617 | must therefore make a *copy* of the given namespace, to allow the |
|
619 | 618 | original module's __dict__ to be cleared and reused. |
|
620 | 619 | |
|
621 | 620 | |
|
622 | 621 | Parameters |
|
623 | 622 | ---------- |
|
624 | 623 | ns : a namespace (a dict, typically) |
|
625 | 624 | |
|
626 | 625 | fname : str |
|
627 | 626 | Filename associated with the namespace. |
|
628 | 627 | |
|
629 | 628 | Examples |
|
630 | 629 | -------- |
|
631 | 630 | |
|
632 | 631 | In [10]: import IPython |
|
633 | 632 | |
|
634 | 633 | In [11]: _ip.cache_main_mod(IPython.__dict__,IPython.__file__) |
|
635 | 634 | |
|
636 | 635 | In [12]: IPython.__file__ in _ip._main_ns_cache |
|
637 | 636 | Out[12]: True |
|
638 | 637 | """ |
|
639 | 638 | self._main_ns_cache[os.path.abspath(fname)] = ns.copy() |
|
640 | 639 | |
|
641 | 640 | def clear_main_mod_cache(self): |
|
642 | 641 | """Clear the cache of main modules. |
|
643 | 642 | |
|
644 | 643 | Mainly for use by utilities like %reset. |
|
645 | 644 | |
|
646 | 645 | Examples |
|
647 | 646 | -------- |
|
648 | 647 | |
|
649 | 648 | In [15]: import IPython |
|
650 | 649 | |
|
651 | 650 | In [16]: _ip.cache_main_mod(IPython.__dict__,IPython.__file__) |
|
652 | 651 | |
|
653 | 652 | In [17]: len(_ip._main_ns_cache) > 0 |
|
654 | 653 | Out[17]: True |
|
655 | 654 | |
|
656 | 655 | In [18]: _ip.clear_main_mod_cache() |
|
657 | 656 | |
|
658 | 657 | In [19]: len(_ip._main_ns_cache) == 0 |
|
659 | 658 | Out[19]: True |
|
660 | 659 | """ |
|
661 | 660 | self._main_ns_cache.clear() |
|
662 | 661 | |
|
663 | 662 | #------------------------------------------------------------------------- |
|
664 | 663 | # Things related to debugging |
|
665 | 664 | #------------------------------------------------------------------------- |
|
666 | 665 | |
|
667 | 666 | def init_pdb(self): |
|
668 | 667 | # Set calling of pdb on exceptions |
|
669 | 668 | # self.call_pdb is a property |
|
670 | 669 | self.call_pdb = self.pdb |
|
671 | 670 | |
|
672 | 671 | def _get_call_pdb(self): |
|
673 | 672 | return self._call_pdb |
|
674 | 673 | |
|
675 | 674 | def _set_call_pdb(self,val): |
|
676 | 675 | |
|
677 | 676 | if val not in (0,1,False,True): |
|
678 | 677 | raise ValueError,'new call_pdb value must be boolean' |
|
679 | 678 | |
|
680 | 679 | # store value in instance |
|
681 | 680 | self._call_pdb = val |
|
682 | 681 | |
|
683 | 682 | # notify the actual exception handlers |
|
684 | 683 | self.InteractiveTB.call_pdb = val |
|
685 | 684 | |
|
686 | 685 | call_pdb = property(_get_call_pdb,_set_call_pdb,None, |
|
687 | 686 | 'Control auto-activation of pdb at exceptions') |
|
688 | 687 | |
|
689 | 688 | def debugger(self,force=False): |
|
690 | 689 | """Call the pydb/pdb debugger. |
|
691 | 690 | |
|
692 | 691 | Keywords: |
|
693 | 692 | |
|
694 | 693 | - force(False): by default, this routine checks the instance call_pdb |
|
695 | 694 | flag and does not actually invoke the debugger if the flag is false. |
|
696 | 695 | The 'force' option forces the debugger to activate even if the flag |
|
697 | 696 | is false. |
|
698 | 697 | """ |
|
699 | 698 | |
|
700 | 699 | if not (force or self.call_pdb): |
|
701 | 700 | return |
|
702 | 701 | |
|
703 | 702 | if not hasattr(sys,'last_traceback'): |
|
704 | 703 | error('No traceback has been produced, nothing to debug.') |
|
705 | 704 | return |
|
706 | 705 | |
|
707 | 706 | # use pydb if available |
|
708 | 707 | if debugger.has_pydb: |
|
709 | 708 | from pydb import pm |
|
710 | 709 | else: |
|
711 | 710 | # fallback to our internal debugger |
|
712 | 711 | pm = lambda : self.InteractiveTB.debugger(force=True) |
|
713 | 712 | self.history_saving_wrapper(pm)() |
|
714 | 713 | |
|
715 | 714 | #------------------------------------------------------------------------- |
|
716 | 715 | # Things related to IPython's various namespaces |
|
717 | 716 | #------------------------------------------------------------------------- |
|
718 | 717 | |
|
719 | 718 | def init_create_namespaces(self, user_ns=None, user_global_ns=None): |
|
720 | 719 | # Create the namespace where the user will operate. user_ns is |
|
721 | 720 | # normally the only one used, and it is passed to the exec calls as |
|
722 | 721 | # the locals argument. But we do carry a user_global_ns namespace |
|
723 | 722 | # given as the exec 'globals' argument. This is useful in embedding
|
724 | 723 | # situations where the ipython shell opens in a context where the |
|
725 | 724 | # distinction between locals and globals is meaningful. For |
|
726 | 725 | # non-embedded contexts, it is just the same object as the user_ns dict. |
|
727 | 726 | |
|
728 | 727 | # FIXME. For some strange reason, __builtins__ is showing up at user |
|
729 | 728 | # level as a dict instead of a module. This is a manual fix, but I |
|
730 | 729 | # should really track down where the problem is coming from. Alex |
|
731 | 730 | # Schmolck reported this problem first. |
|
732 | 731 | |
|
733 | 732 | # A useful post by Alex Martelli on this topic: |
|
734 | 733 | # Re: inconsistent value from __builtins__ |
|
735 | 734 | # From: Alex Martelli <aleaxit@yahoo.com>

736 | 735 | # Date: Friday, 01 October 2004, 04:45:34 PM

737 | 736 | # Groups: comp.lang.python
|
738 | 737 | |
|
739 | 738 | # Michael Hohn <hohn@hooknose.lbl.gov> wrote: |
|
740 | 739 | # > >>> print type(builtin_check.get_global_binding('__builtins__')) |
|
741 | 740 | # > <type 'dict'> |
|
742 | 741 | # > >>> print type(__builtins__) |
|
743 | 742 | # > <type 'module'> |
|
744 | 743 | # > Is this difference in return value intentional? |
|
745 | 744 | |
|
746 | 745 | # Well, it's documented that '__builtins__' can be either a dictionary |
|
747 | 746 | # or a module, and it's been that way for a long time. Whether it's |
|
748 | 747 | # intentional (or sensible), I don't know. In any case, the idea is |
|
749 | 748 | # that if you need to access the built-in namespace directly, you |
|
750 | 749 | # should start with "import __builtin__" (note, no 's') which will |
|
751 | 750 | # definitely give you a module. Yeah, it's somewhat confusing:-(. |
|
752 | 751 | |
|
753 | 752 | # These routines return properly built dicts as needed by the rest of |
|
754 | 753 | # the code, and can also be used by extension writers to generate |
|
755 | 754 | # properly initialized namespaces. |
|
756 | 755 | user_ns, user_global_ns = self.make_user_namespaces(user_ns, |
|
757 | 756 | user_global_ns) |
|
758 | 757 | |
|
759 | 758 | # Assign namespaces |
|
760 | 759 | # This is the namespace where all normal user variables live |
|
761 | 760 | self.user_ns = user_ns |
|
762 | 761 | self.user_global_ns = user_global_ns |
|
763 | 762 | |
|
764 | 763 | # An auxiliary namespace that checks what parts of the user_ns were |
|
765 | 764 | # loaded at startup, so we can list later only variables defined in |
|
766 | 765 | # actual interactive use. Since it is always a subset of user_ns, it |
|
767 | 766 | # doesn't need to be separately tracked in the ns_table. |
|
768 | 767 | self.user_ns_hidden = {} |
|
769 | 768 | |
|
770 | 769 | # A namespace to keep track of internal data structures to prevent |
|
771 | 770 | # them from cluttering user-visible stuff. Will be updated later |
|
772 | 771 | self.internal_ns = {} |
|
773 | 772 | |
|
774 | 773 | # Now that FakeModule produces a real module, we've run into a nasty |
|
775 | 774 | # problem: after script execution (via %run), the module where the user |
|
776 | 775 | # code ran is deleted. Now that this object is a true module (needed |
|
777 | 776 | # so doctest and other tools work correctly), the Python module
|
778 | 777 | # teardown mechanism runs over it, and sets to None every variable |
|
779 | 778 | # present in that module. Top-level references to objects from the |
|
780 | 779 | # script survive, because the user_ns is updated with them. However, |
|
781 | 780 | # calling functions defined in the script that use other things from |
|
782 | 781 | # the script will fail, because the function's closure had references |
|
783 | 782 | # to the original objects, which are now all None. So we must protect |
|
784 | 783 | # these modules from deletion by keeping a cache. |
|
785 | 784 | # |
|
786 | 785 | # To avoid keeping stale modules around (we only need the one from the |
|
787 | 786 | # last run), we use a dict keyed with the full path to the script, so |
|
788 | 787 | # only the last version of the module is held in the cache. Note, |
|
789 | 788 | # however, that we must cache the module *namespace contents* (their |
|
790 | 789 | # __dict__). Because if we try to cache the actual modules, old ones |
|
791 | 790 | # (uncached) could be destroyed while still holding references (such as |
|
792 | 791 | # those held by GUI objects that tend to be long-lived).
|
793 | 792 | # |
|
794 | 793 | # The %reset command will flush this cache. See the cache_main_mod() |
|
795 | 794 | # and clear_main_mod_cache() methods for details on use. |
|
796 | 795 | |
|
797 | 796 | # This is the cache used for 'main' namespaces |
|
798 | 797 | self._main_ns_cache = {} |
|
799 | 798 | # And this is the single instance of FakeModule whose __dict__ we keep |
|
800 | 799 | # copying and clearing for reuse on each %run |
|
801 | 800 | self._user_main_module = FakeModule() |
|
802 | 801 | |
|
803 | 802 | # A table holding all the namespaces IPython deals with, so that |
|
804 | 803 | # introspection facilities can search easily. |
|
805 | 804 | self.ns_table = {'user':user_ns, |
|
806 | 805 | 'user_global':user_global_ns, |
|
807 | 806 | 'internal':self.internal_ns, |
|
808 | 807 | 'builtin':__builtin__.__dict__ |
|
809 | 808 | } |
|
810 | 809 | |
|
811 | 810 | # Similarly, track all namespaces where references can be held and that |
|
812 | 811 | # we can safely clear (so it can NOT include builtin). This one can be |
|
813 | 812 | # a simple list. Note that the main execution namespaces, user_ns and |
|
814 | 813 | # user_global_ns, can NOT be listed here, as clearing them blindly |
|
815 | 814 | # causes errors in object __del__ methods. Instead, the reset() method |
|
816 | 815 | # clears them manually and carefully. |
|
817 | 816 | self.ns_refs_table = [ self.user_ns_hidden, |
|
818 | 817 | self.internal_ns, self._main_ns_cache ] |
|
819 | 818 | |
|
820 | 819 | def make_user_namespaces(self, user_ns=None, user_global_ns=None): |
|
821 | 820 | """Return a valid local and global user interactive namespaces. |
|
822 | 821 | |
|
823 | 822 | This builds a dict with the minimal information needed to operate as a |
|
824 | 823 | valid IPython user namespace, which you can pass to the various |
|
825 | 824 | embedding classes in ipython. The default implementation returns the |
|
826 | 825 | same dict for both the locals and the globals to allow functions to |
|
827 | 826 | refer to variables in the namespace. Customized implementations can |
|
828 | 827 | return different dicts. The locals dictionary can actually be anything |
|
829 | 828 | following the basic mapping protocol of a dict, but the globals dict |
|
830 | 829 | must be a true dict, not even a subclass. It is recommended that any |
|
831 | 830 | custom object for the locals namespace synchronize with the globals |
|
832 | 831 | dict somehow. |
|
833 | 832 | |
|
834 | 833 | Raises TypeError if the provided globals namespace is not a true dict. |
|
835 | 834 | |
|
836 | 835 | Parameters |
|
837 | 836 | ---------- |
|
838 | 837 | user_ns : dict-like, optional |
|
839 | 838 | The current user namespace. The items in this namespace should |
|
840 | 839 | be included in the output. If None, an appropriate blank |
|
841 | 840 | namespace should be created. |
|
842 | 841 | user_global_ns : dict, optional |
|
843 | 842 | The current user global namespace. The items in this namespace |
|
844 | 843 | should be included in the output. If None, an appropriate |
|
845 | 844 | blank namespace should be created. |
|
846 | 845 | |
|
847 | 846 | Returns |
|
848 | 847 | ------- |
|
849 | 848 | A pair: a dictionary-like object to be used as the local namespace

850 | 849 | of the interpreter, and a dict to be used as the global namespace.
|
851 | 850 | """ |
|
852 | 851 | |
|
853 | 852 | |
|
854 | 853 | # We must ensure that __builtin__ (without the final 's') is always |
|
855 | 854 | # available and pointing to the __builtin__ *module*. For more details: |
|
856 | 855 | # http://mail.python.org/pipermail/python-dev/2001-April/014068.html |
|
857 | 856 | |
|
858 | 857 | if user_ns is None: |
|
859 | 858 | # Set __name__ to __main__ to better match the behavior of the |
|
860 | 859 | # normal interpreter. |
|
861 | 860 | user_ns = {'__name__' :'__main__', |
|
862 | 861 | '__builtin__' : __builtin__, |
|
863 | 862 | '__builtins__' : __builtin__, |
|
864 | 863 | } |
|
865 | 864 | else: |
|
866 | 865 | user_ns.setdefault('__name__','__main__') |
|
867 | 866 | user_ns.setdefault('__builtin__',__builtin__) |
|
868 | 867 | user_ns.setdefault('__builtins__',__builtin__) |
|
869 | 868 | |
|
870 | 869 | if user_global_ns is None: |
|
871 | 870 | user_global_ns = user_ns |
|
872 | 871 | if type(user_global_ns) is not dict: |
|
873 | 872 | raise TypeError("user_global_ns must be a true dict; got %r" |
|
874 | 873 | % type(user_global_ns)) |
|
875 | 874 | |
|
876 | 875 | return user_ns, user_global_ns |
|
877 | 876 | |
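What the default branch above produces can be shown concretely; a standalone sketch of the minimal namespace contract (same keys, with the globals defaulting to the very same object as the locals):

# Sketch of the minimal user namespace built when nothing is passed in:
# __name__ set to '__main__', the __builtin__ module reachable under both
# spellings, and the globals dict being the locals dict itself.
import __builtin__

user_ns = {'__name__': '__main__',
           '__builtin__': __builtin__,
           '__builtins__': __builtin__}
user_global_ns = user_ns

assert user_global_ns is user_ns
assert type(user_global_ns) is dict   # the globals side must be a true dict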
|
878 | 877 | def init_sys_modules(self): |
|
879 | 878 | # We need to insert into sys.modules something that looks like a |
|
880 | 879 | # module but which accesses the IPython namespace, for shelve and |
|
881 | 880 | # pickle to work interactively. Normally they rely on getting |
|
882 | 881 | # everything out of __main__, but for embedding purposes each IPython |
|
883 | 882 | # instance has its own private namespace, so we can't go shoving |
|
884 | 883 | # everything into __main__. |
|
885 | 884 | |
|
886 | 885 | # note, however, that we should only do this for non-embedded |
|
887 | 886 | # ipythons, which really mimic the __main__.__dict__ with their own |
|
888 | 887 | # namespace. Embedded instances, on the other hand, should not do |
|
889 | 888 | # this because they need to manage the user local/global namespaces |
|
890 | 889 | # only, but they live within a 'normal' __main__ (meaning, they |
|
891 | 890 | # shouldn't overtake the execution environment of the script they're |
|
892 | 891 | # embedded in). |
|
893 | 892 | |
|
894 | 893 | # This is overridden in the InteractiveShellEmbed subclass to a no-op. |
|
895 | 894 | |
|
896 | 895 | try: |
|
897 | 896 | main_name = self.user_ns['__name__'] |
|
898 | 897 | except KeyError: |
|
899 | 898 | raise KeyError('user_ns dictionary MUST have a "__name__" key') |
|
900 | 899 | else: |
|
901 | 900 | sys.modules[main_name] = FakeModule(self.user_ns) |
|
902 | 901 | |
|
903 | 902 | def init_user_ns(self): |
|
904 | 903 | """Initialize all user-visible namespaces to their minimum defaults. |
|
905 | 904 | |
|
906 | 905 | Certain history lists are also initialized here, as they effectively |
|
907 | 906 | act as user namespaces. |
|
908 | 907 | |
|
909 | 908 | Notes |
|
910 | 909 | ----- |
|
911 | 910 | All data structures here are only filled in, they are NOT reset by this |
|
912 | 911 | method. If they were not empty before, data will simply be added to |
|
913 | 912 | them.
|
914 | 913 | """ |
|
915 | 914 | # This function works in two parts: first we put a few things in |
|
916 | 915 | # user_ns, and we sync that contents into user_ns_hidden so that these |
|
917 | 916 | # initial variables aren't shown by %who. After the sync, we add the |
|
918 | 917 | # rest of what we *do* want the user to see with %who even on a new |
|
919 | 918 | # session (probably nothing, so they really only see their own stuff)
|
920 | 919 | |
|
921 | 920 | # The user dict must *always* have a __builtin__ reference to the |
|
922 | 921 | # Python standard __builtin__ namespace, which must be imported. |
|
923 | 922 | # This is so that certain operations in prompt evaluation can be |
|
924 | 923 | # reliably executed with builtins. Note that we can NOT use |
|
925 | 924 | # __builtins__ (note the 's'), because that can either be a dict or a |
|
926 | 925 | # module, and can even mutate at runtime, depending on the context |
|
927 | 926 | # (Python makes no guarantees on it). In contrast, __builtin__ is |
|
928 | 927 | # always a module object, though it must be explicitly imported. |
|
929 | 928 | |
|
930 | 929 | # For more details: |
|
931 | 930 | # http://mail.python.org/pipermail/python-dev/2001-April/014068.html |
|
932 | 931 | ns = dict(__builtin__ = __builtin__) |
|
933 | 932 | |
|
934 | 933 | # Put 'help' in the user namespace |
|
935 | 934 | try: |
|
936 | 935 | from site import _Helper |
|
937 | 936 | ns['help'] = _Helper() |
|
938 | 937 | except ImportError: |
|
939 | 938 | warn('help() not available - check site.py') |
|
940 | 939 | |
|
941 | 940 | # make global variables for user access to the histories |
|
942 | 941 | ns['_ih'] = self.input_hist |
|
943 | 942 | ns['_oh'] = self.output_hist |
|
944 | 943 | ns['_dh'] = self.dir_hist |
|
945 | 944 | |
|
946 | 945 | ns['_sh'] = shadowns |
|
947 | 946 | |
|
948 | 947 | # user aliases to input and output histories. These shouldn't show up |
|
949 | 948 | # in %who, as they can have very large reprs. |
|
950 | 949 | ns['In'] = self.input_hist |
|
951 | 950 | ns['Out'] = self.output_hist |
|
952 | 951 | |
|
953 | 952 | # Store myself as the public api!!! |
|
954 | 953 | ns['get_ipython'] = self.get_ipython |
|
955 | 954 | |
|
956 | 955 | # Sync what we've added so far to user_ns_hidden so these aren't seen |
|
957 | 956 | # by %who |
|
958 | 957 | self.user_ns_hidden.update(ns) |
|
959 | 958 | |
|
960 | 959 | # Anything put into ns now would show up in %who. Think twice before |
|
961 | 960 | # putting anything here, as we really want %who to show the user their |
|
962 | 961 | # stuff, not our variables. |
|
963 | 962 | |
|
964 | 963 | # Finally, update the real user's namespace |
|
965 | 964 | self.user_ns.update(ns) |
|
966 | 965 | |
|
967 | 966 | |
|
968 | 967 | def reset(self): |
|
969 | 968 | """Clear all internal namespaces. |
|
970 | 969 | |
|
971 | 970 | Note that this is much more aggressive than %reset, since it clears |
|
972 | 971 | fully all namespaces, as well as all input/output lists. |
|
973 | 972 | """ |
|
974 | 973 | self.alias_manager.clear_aliases() |
|
975 | 974 | |
|
976 | 975 | # Clear input and output histories |
|
977 | 976 | self.input_hist[:] = [] |
|
978 | 977 | self.input_hist_raw[:] = [] |
|
979 | 978 | self.output_hist.clear() |
|
980 | 979 | |
|
981 | 980 | # Clear namespaces holding user references |
|
982 | 981 | for ns in self.ns_refs_table: |
|
983 | 982 | ns.clear() |
|
984 | 983 | |
|
985 | 984 | # The main execution namespaces must be cleared very carefully, |
|
986 | 985 | # skipping the deletion of the builtin-related keys, because doing so |
|
987 | 986 | # would cause errors in many object's __del__ methods. |
|
988 | 987 | for ns in [self.user_ns, self.user_global_ns]: |
|
989 | 988 | drop_keys = set(ns.keys()) |
|
990 | 989 | drop_keys.discard('__builtin__') |
|
991 | 990 | drop_keys.discard('__builtins__') |
|
992 | 991 | for k in drop_keys: |
|
993 | 992 | del ns[k] |
|
994 | 993 | |
|
995 | 994 | # Restore the user namespaces to minimal usability |
|
996 | 995 | self.init_user_ns() |
|
997 | 996 | |
|
998 | 997 | # Restore the default and user aliases |
|
999 | 998 | self.alias_manager.init_aliases() |
|
1000 | 999 | |
|
1001 | 1000 | def reset_selective(self, regex=None): |
|
1002 | 1001 | """Clear selective variables from internal namespaces based on a |
|
1003 | 1002 | specified regular expression. |
|
1004 | 1003 | |
|
1005 | 1004 | Parameters |
|
1006 | 1005 | ---------- |
|
1007 | 1006 | regex : string or compiled pattern, optional |
|
1008 | 1007 | A regular expression pattern that will be used in searching |
|
1009 | 1008 | variable names in the users namespaces. |
|
1010 | 1009 | """ |
|
1011 | 1010 | if regex is not None: |
|
1012 | 1011 | try: |
|
1013 | 1012 | m = re.compile(regex) |
|
1014 | 1013 | except TypeError: |
|
1015 | 1014 | raise TypeError('regex must be a string or compiled pattern') |
|
1016 | 1015 | # Search for keys in each namespace that match the given regex |
|
1017 | 1016 | # If a match is found, delete the key/value pair. |
|
1018 | 1017 | for ns in self.ns_refs_table: |
|
1019 | 1018 | for var in ns: |
|
1020 | 1019 | if m.search(var): |
|
1021 | 1020 | del ns[var] |
|
1022 | 1021 | |
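The regex-driven clearing above can be sketched as a standalone helper; note that this sketch snapshots the keys with list() before deleting, since removing entries while iterating a dict directly raises a RuntimeError:

# Standalone sketch of selective clearing by regex over a namespace dict.
import re

def clear_matching(ns, regex):
    pattern = re.compile(regex)
    for var in list(ns):          # snapshot keys so deletion is safe
        if pattern.search(var):
            del ns[var]

ns = {'tmp_a': 1, 'tmp_b': 2, 'keep': 3}
clear_matching(ns, r'^tmp_')
assert ns == {'keep': 3}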
|
1023 | 1022 | def push(self, variables, interactive=True): |
|
1024 | 1023 | """Inject a group of variables into the IPython user namespace. |
|
1025 | 1024 | |
|
1026 | 1025 | Parameters |
|
1027 | 1026 | ---------- |
|
1028 | 1027 | variables : dict, str or list/tuple of str |
|
1029 | 1028 | The variables to inject into the user's namespace. If a dict, a |
|
1030 | 1029 | simple update is done. If a str, the string is assumed to have |
|
1031 | 1030 | variable names separated by spaces. A list/tuple of str can also |
|
1032 | 1031 | be used to give the variable names. If just the variable names are |
|
1033 | 1032 | given (list/tuple/str), then the variable values are looked up in the

1034 | 1033 | caller's frame.
|
1035 | 1034 | interactive : bool |
|
1036 | 1035 | If True (default), the variables will be listed with the ``who`` |
|
1037 | 1036 | magic. |
|
1038 | 1037 | """ |
|
1039 | 1038 | vdict = None |
|
1040 | 1039 | |
|
1041 | 1040 | # We need a dict of name/value pairs to do namespace updates. |
|
1042 | 1041 | if isinstance(variables, dict): |
|
1043 | 1042 | vdict = variables |
|
1044 | 1043 | elif isinstance(variables, (basestring, list, tuple)): |
|
1045 | 1044 | if isinstance(variables, basestring): |
|
1046 | 1045 | vlist = variables.split() |
|
1047 | 1046 | else: |
|
1048 | 1047 | vlist = variables |
|
1049 | 1048 | vdict = {} |
|
1050 | 1049 | cf = sys._getframe(1) |
|
1051 | 1050 | for name in vlist: |
|
1052 | 1051 | try: |
|
1053 | 1052 | vdict[name] = eval(name, cf.f_globals, cf.f_locals) |
|
1054 | 1053 | except: |
|
1055 | 1054 | print ('Could not get variable %s from %s' % |
|
1056 | 1055 | (name,cf.f_code.co_name)) |
|
1057 | 1056 | else: |
|
1058 | 1057 | raise ValueError('variables must be a dict/str/list/tuple') |
|
1059 | 1058 | |
|
1060 | 1059 | # Propagate variables to user namespace |
|
1061 | 1060 | self.user_ns.update(vdict) |
|
1062 | 1061 | |
|
1063 | 1062 | # And configure interactive visibility |
|
1064 | 1063 | config_ns = self.user_ns_hidden |
|
1065 | 1064 | if interactive: |
|
1066 | 1065 | for name, val in vdict.iteritems(): |
|
1067 | 1066 | config_ns.pop(name, None) |
|
1068 | 1067 | else: |
|
1069 | 1068 | for name,val in vdict.iteritems(): |
|
1070 | 1069 | config_ns[name] = val |
|
1071 | 1070 | |
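How the bare-name form of push() resolves values can be seen in isolation; a minimal sketch of the caller-frame lookup it performs via sys._getframe(1):

# Minimal sketch: resolve space-separated names in the caller's frame.
import sys

def grab_from_caller(names):
    cf = sys._getframe(1)
    return dict((name, eval(name, cf.f_globals, cf.f_locals))
                for name in names.split())

def demo():
    a, b = 10, 20
    return grab_from_caller('a b')

assert demo() == {'a': 10, 'b': 20}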
|
1072 | 1071 | #------------------------------------------------------------------------- |
|
1073 | 1072 | # Things related to object introspection |
|
1074 | 1073 | #------------------------------------------------------------------------- |
|
1075 | 1074 | |
|
1076 | 1075 | def _ofind(self, oname, namespaces=None): |
|
1077 | 1076 | """Find an object in the available namespaces. |
|
1078 | 1077 | |
|
1079 | 1078 | self._ofind(oname) -> dict with keys: found,obj,ospace,ismagic |
|
1080 | 1079 | |
|
1081 | 1080 | Has special code to detect magic functions. |
|
1082 | 1081 | """ |
|
1083 | 1082 | #oname = oname.strip() |
|
1084 | 1083 | #print '1- oname: <%r>' % oname # dbg |
|
1085 | 1084 | try: |
|
1086 | 1085 | oname = oname.strip().encode('ascii') |
|
1087 | 1086 | #print '2- oname: <%r>' % oname # dbg |
|
1088 | 1087 | except UnicodeEncodeError: |
|
1089 | 1088 | print 'Python identifiers can only contain ascii characters.' |
|
1090 | 1089 | return dict(found=False) |
|
1091 | 1090 | |
|
1092 | 1091 | alias_ns = None |
|
1093 | 1092 | if namespaces is None: |
|
1094 | 1093 | # Namespaces to search in: |
|
1095 | 1094 | # Put them in a list. The order is important so that we |
|
1096 | 1095 | # find things in the same order that Python finds them. |
|
1097 | 1096 | namespaces = [ ('Interactive', self.user_ns), |
|
1098 | 1097 | ('IPython internal', self.internal_ns), |
|
1099 | 1098 | ('Python builtin', __builtin__.__dict__), |
|
1100 | 1099 | ('Alias', self.alias_manager.alias_table), |
|
1101 | 1100 | ] |
|
1102 | 1101 | alias_ns = self.alias_manager.alias_table |
|
1103 | 1102 | |
|
1104 | 1103 | # initialize results to 'null' |
|
1105 | 1104 | found = False; obj = None; ospace = None; ds = None; |
|
1106 | 1105 | ismagic = False; isalias = False; parent = None |
|
1107 | 1106 | |
|
1108 | 1107 | # We need to special-case 'print', which as of python2.6 registers as a |
|
1109 | 1108 | # function but should only be treated as one if print_function was |
|
1110 | 1109 | # loaded with a future import. In this case, just bail. |
|
1111 | 1110 | if (oname == 'print' and not (self.compile.compiler.flags & |
|
1112 | 1111 | __future__.CO_FUTURE_PRINT_FUNCTION)): |
|
1113 | 1112 | return {'found':found, 'obj':obj, 'namespace':ospace, |
|
1114 | 1113 | 'ismagic':ismagic, 'isalias':isalias, 'parent':parent} |
|
1115 | 1114 | |
|
1116 | 1115 | # Look for the given name by splitting it in parts. If the head is |
|
1117 | 1116 | # found, then we look for all the remaining parts as members, and only |
|
1118 | 1117 | # declare success if we can find them all. |
|
1119 | 1118 | oname_parts = oname.split('.') |
|
1120 | 1119 | oname_head, oname_rest = oname_parts[0],oname_parts[1:] |
|
1121 | 1120 | for nsname,ns in namespaces: |
|
1122 | 1121 | try: |
|
1123 | 1122 | obj = ns[oname_head] |
|
1124 | 1123 | except KeyError: |
|
1125 | 1124 | continue |
|
1126 | 1125 | else: |
|
1127 | 1126 | #print 'oname_rest:', oname_rest # dbg |
|
1128 | 1127 | for part in oname_rest: |
|
1129 | 1128 | try: |
|
1130 | 1129 | parent = obj |
|
1131 | 1130 | obj = getattr(obj,part) |
|
1132 | 1131 | except: |
|
1133 | 1132 | # Blanket except b/c some badly implemented objects |
|
1134 | 1133 | # allow __getattr__ to raise exceptions other than |
|
1135 | 1134 | # AttributeError, which then crashes IPython. |
|
1136 | 1135 | break |
|
1137 | 1136 | else: |
|
1138 | 1137 | # If we finish the for loop (no break), we got all members |
|
1139 | 1138 | found = True |
|
1140 | 1139 | ospace = nsname |
|
1141 | 1140 | if ns == alias_ns: |
|
1142 | 1141 | isalias = True |
|
1143 | 1142 | break # namespace loop |
|
1144 | 1143 | |
|
1145 | 1144 | # Try to see if it's magic |
|
1146 | 1145 | if not found: |
|
1147 | 1146 | if oname.startswith(ESC_MAGIC): |
|
1148 | 1147 | oname = oname[1:] |
|
1149 | 1148 | obj = getattr(self,'magic_'+oname,None) |
|
1150 | 1149 | if obj is not None: |
|
1151 | 1150 | found = True |
|
1152 | 1151 | ospace = 'IPython internal' |
|
1153 | 1152 | ismagic = True |
|
1154 | 1153 | |
|
1155 | 1154 | # Last try: special-case some literals like '', [], {}, etc: |
|
1156 | 1155 | if not found and oname_head in ["''",'""','[]','{}','()']: |
|
1157 | 1156 | obj = eval(oname_head) |
|
1158 | 1157 | found = True |
|
1159 | 1158 | ospace = 'Interactive' |
|
1160 | 1159 | |
|
1161 | 1160 | return {'found':found, 'obj':obj, 'namespace':ospace, |
|
1162 | 1161 | 'ismagic':ismagic, 'isalias':isalias, 'parent':parent} |
|
1163 | 1162 | |
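The head/rest traversal in _ofind can be reduced to a small standalone helper (simplified: it stops at the first namespace containing the head and treats only AttributeError as a miss, whereas the real code also handles magics, aliases and literals):

# Simplified sketch of dotted-name resolution across ordered namespaces.
def find_object(oname, namespaces):
    parts = oname.split('.')
    head, rest = parts[0], parts[1:]
    for nsname, ns in namespaces:
        if head not in ns:
            continue
        obj = ns[head]
        try:
            for part in rest:
                obj = getattr(obj, part)
        except AttributeError:
            return {'found': False}
        return {'found': True, 'obj': obj, 'namespace': nsname}
    return {'found': False}

import os
hit = find_object('os.path.join', [('Interactive', {'os': os})])
assert hit['found'] and hit['obj'] is os.path.join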
|
1164 | 1163 | def _ofind_property(self, oname, info): |
|
1165 | 1164 | """Second part of object finding, to look for property details.""" |
|
1166 | 1165 | if info.found: |
|
1167 | 1166 | # Get the docstring of the class property if it exists. |
|
1168 | 1167 | path = oname.split('.') |
|
1169 | 1168 | root = '.'.join(path[:-1]) |
|
1170 | 1169 | if info.parent is not None: |
|
1171 | 1170 | try: |
|
1172 | 1171 | target = getattr(info.parent, '__class__') |
|
1173 | 1172 | # The object belongs to a class instance. |
|
1174 | 1173 | try: |
|
1175 | 1174 | target = getattr(target, path[-1]) |
|
1176 | 1175 | # The class defines the object. |
|
1177 | 1176 | if isinstance(target, property): |
|
1178 | 1177 | oname = root + '.__class__.' + path[-1] |
|
1179 | 1178 | info = Struct(self._ofind(oname)) |
|
1180 | 1179 | except AttributeError: pass |
|
1181 | 1180 | except AttributeError: pass |
|
1182 | 1181 | |
|
1183 | 1182 | # We return either the new info or the unmodified input if the object |
|
1184 | 1183 | # hadn't been found |
|
1185 | 1184 | return info |
|
1186 | 1185 | |
|
1187 | 1186 | def _object_find(self, oname, namespaces=None): |
|
1188 | 1187 | """Find an object and return a struct with info about it.""" |
|
1189 | 1188 | inf = Struct(self._ofind(oname, namespaces)) |
|
1190 | 1189 | return Struct(self._ofind_property(oname, inf)) |
|
1191 | 1190 | |
|
1192 | 1191 | def _inspect(self, meth, oname, namespaces=None, **kw): |
|
1193 | 1192 | """Generic interface to the inspector system. |
|
1194 | 1193 | |
|
1195 | 1194 | This function is meant to be called by pdef, pdoc & friends.""" |
|
1196 | 1195 | info = self._object_find(oname) |
|
1197 | 1196 | if info.found: |
|
1198 | 1197 | pmethod = getattr(self.inspector, meth) |
|
1199 | 1198 | formatter = format_screen if info.ismagic else None |
|
1200 | 1199 | if meth == 'pdoc': |
|
1201 | 1200 | pmethod(info.obj, oname, formatter) |
|
1202 | 1201 | elif meth == 'pinfo': |
|
1203 | 1202 | pmethod(info.obj, oname, formatter, info, **kw) |
|
1204 | 1203 | else: |
|
1205 | 1204 | pmethod(info.obj, oname) |
|
1206 | 1205 | else: |
|
1207 | 1206 | print 'Object `%s` not found.' % oname |
|
1208 | 1207 | return 'not found' # so callers can take other action |
|
1209 | 1208 | |
|
1210 | 1209 | def object_inspect(self, oname): |
|
1211 | 1210 | info = self._object_find(oname) |
|
1212 | 1211 | if info.found: |
|
1213 | 1212 | return self.inspector.info(info.obj, oname, info=info) |
|
1214 | 1213 | else: |
|
1215 | 1214 | return oinspect.object_info(name=oname, found=False) |
|
1216 | 1215 | |
|
1217 | 1216 | #------------------------------------------------------------------------- |
|
1218 | 1217 | # Things related to history management |
|
1219 | 1218 | #------------------------------------------------------------------------- |
|
1220 | 1219 | |
|
1221 | 1220 | def init_history(self): |
|
1222 | 1221 | # List of input with multi-line handling. |
|
1223 | 1222 | self.input_hist = InputList() |
|
1224 | 1223 | # This one will hold the 'raw' input history, without any |
|
1225 | 1224 | # pre-processing. This will allow users to retrieve the input just as |
|
1226 | 1225 | # it was exactly typed in by the user, with %hist -r. |
|
1227 | 1226 | self.input_hist_raw = InputList() |
|
1228 | 1227 | |
|
1229 | 1228 | # list of visited directories |
|
1230 | 1229 | try: |
|
1231 | 1230 | self.dir_hist = [os.getcwd()] |
|
1232 | 1231 | except OSError: |
|
1233 | 1232 | self.dir_hist = [] |
|
1234 | 1233 | |
|
1235 | 1234 | # dict of output history |
|
1236 | 1235 | self.output_hist = {} |
|
1237 | 1236 | |
|
1238 | 1237 | # Now the history file |
|
1239 | 1238 | if self.profile: |
|
1240 | 1239 | histfname = 'history-%s' % self.profile |
|
1241 | 1240 | else: |
|
1242 | 1241 | histfname = 'history' |
|
1243 | 1242 | self.histfile = os.path.join(self.ipython_dir, histfname) |
|
1244 | 1243 | |
|
1245 | 1244 | # Fill the history zero entry, user counter starts at 1 |
|
1246 | 1245 | self.input_hist.append('\n') |
|
1247 | 1246 | self.input_hist_raw.append('\n') |
|
1248 | 1247 | |
|
1249 | 1248 | def init_shadow_hist(self): |
|
1250 | 1249 | try: |
|
1251 | 1250 | self.db = pickleshare.PickleShareDB(self.ipython_dir + "/db") |
|
1252 | except exceptions.UnicodeDecodeError: | 

1251 | except UnicodeDecodeError: | 
|
1253 | 1252 | print "Your ipython_dir can't be decoded to unicode!" |
|
1254 | 1253 | print "Please set HOME environment variable to something that" |
|
1255 | 1254 | print r"only has ASCII characters, e.g. c:\home" |
|
1256 | 1255 | print "Now it is", self.ipython_dir |
|
1257 | 1256 | sys.exit() |
|
1258 | 1257 | self.shadowhist = ipcorehist.ShadowHist(self.db) |
|
1259 | 1258 | |
|
1260 | 1259 | def savehist(self): |
|
1261 | 1260 | """Save input history to a file (via readline library).""" |
|
1262 | 1261 | |
|
1263 | 1262 | try: |
|
1264 | 1263 | self.readline.write_history_file(self.histfile) |
|
1265 | 1264 | except: |
|
1266 | 1265 | print 'Unable to save IPython command history to file: ' + \ |
|
1267 | 1266 | `self.histfile` |
|
1268 | 1267 | |
|
1269 | 1268 | def reloadhist(self): |
|
1270 | 1269 | """Reload the input history from disk file.""" |
|
1271 | 1270 | |
|
1272 | 1271 | try: |
|
1273 | 1272 | self.readline.clear_history() |
|
1274 | 1273 | self.readline.read_history_file(self.shell.histfile) |
|
1275 | 1274 | except AttributeError: |
|
1276 | 1275 | pass |
|
1277 | 1276 | |
|
1278 | 1277 | def history_saving_wrapper(self, func): |
|
1279 | 1278 | """ Wrap func for readline history saving |
|
1280 | 1279 | |
|
1281 | 1280 | Convert func into callable that saves & restores |
|
1282 | 1281 | history around the call """ |
|
1283 | 1282 | |
|
1284 | 1283 | if self.has_readline: |
|
1285 | 1284 | from IPython.utils import rlineimpl as readline |
|
1286 | 1285 | else: |
|
1287 | 1286 | return func |
|
1288 | 1287 | |
|
1289 | 1288 | def wrapper(): |
|
1290 | 1289 | self.savehist() |
|
1291 | 1290 | try: |
|
1292 | 1291 | func() |
|
1293 | 1292 | finally: |
|
1294 | 1293 | readline.read_history_file(self.histfile) |
|
1295 | 1294 | return wrapper |
|
1296 | 1295 | |
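
For illustration, a minimal sketch of how this wrapper could be used; `ip` stands for the active InteractiveShell instance and `launch_editor` is a hypothetical function that takes over the terminal:

    def launch_editor():
        # Hypothetical: anything that would clobber readline state.
        import subprocess
        subprocess.call(['nano', '/tmp/scratch.py'])

    # The wrapped callable saves the history file before the call and
    # re-reads it afterwards, so the editor session does not lose it.
    safe_edit = ip.history_saving_wrapper(launch_editor)
    safe_edit()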
|
1297 | 1296 | def get_history(self, index=None, raw=False, output=True): |
|
1298 | 1297 | """Get the history list. |
|
1299 | 1298 | |
|
1300 | 1299 | Get the input and output history. |
|
1301 | 1300 | |
|
1302 | 1301 | Parameters |
|
1303 | 1302 | ---------- |
|
1304 | 1303 | index : n or (n1, n2) or None |
|
1305 | 1304 | If n, then the last n entries. If a tuple, then all in |
|
1306 | 1305 | range(n1, n2). If None, then all entries. Raises IndexError if |
|
1307 | 1306 | the format of index is incorrect. |
|
1308 | 1307 | raw : bool |
|
1309 | 1308 | If True, return the raw input. |
|
1310 | 1309 | output : bool |
|
1311 | 1310 | If True, then return the output as well. |
|
1312 | 1311 | |
|
1313 | 1312 | Returns |
|
1314 | 1313 | ------- |
|
1315 | 1314 | If output is True, then return a dict of tuples, keyed by the prompt |
|
1316 | 1315 | numbers and with values of (input, output). If output is False, then |
|
1317 | 1316 | a dict, keyed by the prompt number with the values of input. Raises |
|
1318 | 1317 | IndexError if no history is found. |
|
1319 | 1318 | """ |
|
1320 | 1319 | if raw: |
|
1321 | 1320 | input_hist = self.input_hist_raw |
|
1322 | 1321 | else: |
|
1323 | 1322 | input_hist = self.input_hist |
|
1324 | 1323 | if output: |
|
1325 | 1324 | output_hist = self.user_ns['Out'] |
|
1326 | 1325 | n = len(input_hist) |
|
1327 | 1326 | if index is None: |
|
1328 | 1327 | start=0; stop=n |
|
1329 | 1328 | elif isinstance(index, int): |
|
1330 | 1329 | start=n-index; stop=n |
|
1331 | 1330 | elif isinstance(index, tuple) and len(index) == 2: |
|
1332 | 1331 | start=index[0]; stop=index[1] |
|
1333 | 1332 | else: |
|
1334 | 1333 | raise IndexError('Not a valid index for the input history: %r' |
|
1335 | 1334 | % index) |
|
1336 | 1335 | hist = {} |
|
1337 | 1336 | for i in range(start, stop): |
|
1338 | 1337 | if output: |
|
1339 | 1338 | hist[i] = (input_hist[i], output_hist.get(i)) |
|
1340 | 1339 | else: |
|
1341 | 1340 | hist[i] = input_hist[i] |
|
1342 | 1341 | if len(hist)==0: |
|
1343 | 1342 | raise IndexError('No history for range of indices: %r' % index) |
|
1344 | 1343 | return hist |
|
1345 | 1344 | |
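
A small usage sketch of the interface documented above; `ip` stands for the running InteractiveShell instance and the prompt numbers are illustrative:

    # Last three entries as a dict of (input, output) tuples,
    # keyed by prompt number.
    recent = ip.get_history(index=3)

    # Raw, unprocessed input only, for prompts 1 through 9.
    raw = ip.get_history(index=(1, 10), raw=True, output=False)
    for n in sorted(raw):
        print n, raw[n],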
|
1346 | 1345 | #------------------------------------------------------------------------- |
|
1347 | 1346 | # Things related to exception handling and tracebacks (not debugging) |
|
1348 | 1347 | #------------------------------------------------------------------------- |
|
1349 | 1348 | |
|
1350 | 1349 | def init_traceback_handlers(self, custom_exceptions): |
|
1351 | 1350 | # Syntax error handler. |
|
1352 | 1351 | self.SyntaxTB = ultratb.SyntaxTB(color_scheme='NoColor') |
|
1353 | 1352 | |
|
1354 | 1353 | # The interactive one is initialized with an offset, meaning we always |
|
1355 | 1354 | # want to remove the topmost item in the traceback, which is our own |
|
1356 | 1355 | # internal code. Valid modes: ['Plain','Context','Verbose'] |
|
1357 | 1356 | self.InteractiveTB = ultratb.AutoFormattedTB(mode = 'Plain', |
|
1358 | 1357 | color_scheme='NoColor', |
|
1359 | 1358 | tb_offset = 1) |
|
1360 | 1359 | |
|
1361 | 1360 | # The instance will store a pointer to the system-wide exception hook, |
|
1362 | 1361 | # so that runtime code (such as magics) can access it. This is because |
|
1363 | 1362 | # during the read-eval loop, it may get temporarily overwritten. |
|
1364 | 1363 | self.sys_excepthook = sys.excepthook |
|
1365 | 1364 | |
|
1366 | 1365 | # and add any custom exception handlers the user may have specified |
|
1367 | 1366 | self.set_custom_exc(*custom_exceptions) |
|
1368 | 1367 | |
|
1369 | 1368 | # Set the exception mode |
|
1370 | 1369 | self.InteractiveTB.set_mode(mode=self.xmode) |
|
1371 | 1370 | |
|
1372 | 1371 | def set_custom_exc(self, exc_tuple, handler): |
|
1373 | 1372 | """set_custom_exc(exc_tuple,handler) |
|
1374 | 1373 | |
|
1375 | 1374 | Set a custom exception handler, which will be called if any of the |
|
1376 | 1375 | exceptions in exc_tuple occur in the mainloop (specifically, in the |
|
1377 | 1376 | runcode() method). |
|
1378 | 1377 | |
|
1379 | 1378 | Inputs: |
|
1380 | 1379 | |
|
1381 | 1380 | - exc_tuple: a *tuple* of valid exceptions to call the defined |
|
1382 | 1381 | handler for. It is very important that you use a tuple, and NOT A |
|
1383 | 1382 | LIST here, because of the way Python's except statement works. If |
|
1384 | 1383 | you only want to trap a single exception, use a singleton tuple: |
|
1385 | 1384 | |
|
1386 | 1385 | exc_tuple == (MyCustomException,) |
|
1387 | 1386 | |
|
1388 | 1387 | - handler: this must be defined as a function with the following |
|
1389 | 1388 | basic interface:: |
|
1390 | 1389 | |
|
1391 | 1390 | def my_handler(self, etype, value, tb, tb_offset=None) |
|
1392 | 1391 | ... |
|
1393 | 1392 | # The return value must be |
|
1394 | 1393 | return structured_traceback |
|
1395 | 1394 | |
|
1396 | 1395 | This will be made into an instance method (via types.MethodType) |
|
1397 | 1396 | of IPython itself, and it will be called if any of the exceptions |
|
1398 | 1397 | listed in the exc_tuple are caught. If the handler is None, an |
|
1399 | 1398 | internal basic one is used, which just prints basic info. |
|
1400 | 1399 | |
|
1401 | 1400 | WARNING: by putting your own exception handler into IPython's main |
|
1402 | 1401 | execution loop, you run a very good chance of nasty crashes. This |
|
1403 | 1402 | facility should only be used if you really know what you are doing.""" |
|
1404 | 1403 | |
|
1405 | 1404 | assert type(exc_tuple)==type(()) , \ |
|
1406 | 1405 | "The custom exceptions must be given AS A TUPLE." |
|
1407 | 1406 | |
|
1408 | 1407 | def dummy_handler(self,etype,value,tb): |
|
1409 | 1408 | print '*** Simple custom exception handler ***' |
|
1410 | 1409 | print 'Exception type :',etype |
|
1411 | 1410 | print 'Exception value:',value |
|
1412 | 1411 | print 'Traceback :',tb |
|
1413 | 1412 | print 'Source code :','\n'.join(self.buffer) |
|
1414 | 1413 | |
|
1415 | 1414 | if handler is None: handler = dummy_handler |
|
1416 | 1415 | |
|
1417 | self.CustomTB = new.instancemethod(handler,self,self.__class__) | |

1416 | self.CustomTB = types.MethodType(handler, self) | |
|
1418 | 1417 | self.custom_exceptions = exc_tuple |
|
1419 | 1418 | |
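
A brief sketch of the handler contract spelled out in the docstring; `ip` stands for the shell instance and MyCustomException is a made-up exception class:

    class MyCustomException(Exception):
        pass

    def my_handler(self, etype, value, tb, tb_offset=None):
        # Custom logging/cleanup would go here; then fall back to the
        # shell's own formatter so a normal traceback is still produced.
        return self.InteractiveTB.structured_traceback(
            etype, value, tb, tb_offset=tb_offset)

    ip.set_custom_exc((MyCustomException,), my_handler)   # note the tuple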
|
1420 | 1419 | def excepthook(self, etype, value, tb): |
|
1421 | 1420 | """One more defense for GUI apps that call sys.excepthook. |
|
1422 | 1421 | |
|
1423 | 1422 | GUI frameworks like wxPython trap exceptions and call |
|
1424 | 1423 | sys.excepthook themselves. I guess this is a feature that |
|
1425 | 1424 | enables them to keep running after exceptions that would |
|
1426 | 1425 | otherwise kill their mainloop. This is a bother for IPython |
|
1427 | 1426 | which expects to catch all of the program exceptions with a try: |
|
1428 | 1427 | except: statement. |
|
1429 | 1428 | |
|
1430 | 1429 | Normally, IPython sets sys.excepthook to a CrashHandler instance, so if |
|
1431 | 1430 | any app directly invokes sys.excepthook, it will look to the user like |
|
1432 | 1431 | IPython crashed. In order to work around this, we can disable the |
|
1433 | 1432 | CrashHandler and replace it with this excepthook instead, which prints a |
|
1434 | 1433 | regular traceback using our InteractiveTB. In this fashion, apps which |
|
1435 | 1434 | call sys.excepthook will generate a regular-looking exception from |
|
1436 | 1435 | IPython, and the CrashHandler will only be triggered by real IPython |
|
1437 | 1436 | crashes. |
|
1438 | 1437 | |
|
1439 | 1438 | This hook should be used sparingly, only in places which are not likely |
|
1440 | 1439 | to be true IPython errors. |
|
1441 | 1440 | """ |
|
1442 | 1441 | self.showtraceback((etype,value,tb),tb_offset=0) |
|
1443 | 1442 | |
|
1444 | 1443 | def showtraceback(self,exc_tuple = None,filename=None,tb_offset=None, |
|
1445 | 1444 | exception_only=False): |
|
1446 | 1445 | """Display the exception that just occurred. |
|
1447 | 1446 | |
|
1448 | 1447 | If nothing is known about the exception, this is the method which |
|
1449 | 1448 | should be used throughout the code for presenting user tracebacks, |
|
1450 | 1449 | rather than directly invoking the InteractiveTB object. |
|
1451 | 1450 | |
|
1452 | 1451 | A specific showsyntaxerror() also exists, but this method can take |
|
1453 | 1452 | care of calling it if needed, so unless you are explicitly catching a |
|
1454 | 1453 | SyntaxError exception, don't try to analyze the stack manually and |
|
1455 | 1454 | simply call this method.""" |
|
1456 | 1455 | |
|
1457 | 1456 | try: |
|
1458 | 1457 | if exc_tuple is None: |
|
1459 | 1458 | etype, value, tb = sys.exc_info() |
|
1460 | 1459 | else: |
|
1461 | 1460 | etype, value, tb = exc_tuple |
|
1462 | 1461 | |
|
1463 | 1462 | if etype is None: |
|
1464 | 1463 | if hasattr(sys, 'last_type'): |
|
1465 | 1464 | etype, value, tb = sys.last_type, sys.last_value, \ |
|
1466 | 1465 | sys.last_traceback |
|
1467 | 1466 | else: |
|
1468 | 1467 | self.write_err('No traceback available to show.\n') |
|
1469 | 1468 | return |
|
1470 | 1469 | |
|
1471 | 1470 | if etype is SyntaxError: |
|
1472 | 1471 | # Though this won't be called by syntax errors in the input |
|
1473 | 1472 | # line, there may be SyntaxError cases with imported code. |
|
1474 | 1473 | self.showsyntaxerror(filename) |
|
1475 | 1474 | elif etype is UsageError: |
|
1476 | 1475 | print "UsageError:", value |
|
1477 | 1476 | else: |
|
1478 | 1477 | # WARNING: these variables are somewhat deprecated and not |
|
1479 | 1478 | # necessarily safe to use in a threaded environment, but tools |
|
1480 | 1479 | # like pdb depend on their existence, so let's set them. If we |
|
1481 | 1480 | # find problems in the field, we'll need to revisit their use. |
|
1482 | 1481 | sys.last_type = etype |
|
1483 | 1482 | sys.last_value = value |
|
1484 | 1483 | sys.last_traceback = tb |
|
1485 | 1484 | |
|
1486 | 1485 | if etype in self.custom_exceptions: |
|
1487 | 1486 | # FIXME: Old custom traceback objects may just return a |
|
1488 | 1487 | # string, in that case we just put it into a list |
|
1489 | 1488 | stb = self.CustomTB(etype, value, tb, tb_offset) |
|
1490 | 1489 | if isinstance(stb, basestring): |
|
1491 | 1490 | stb = [stb] |
|
1492 | 1491 | else: |
|
1493 | 1492 | if exception_only: |
|
1494 | 1493 | stb = ['An exception has occurred, use %tb to see ' |
|
1495 | 1494 | 'the full traceback.\n'] |
|
1496 | 1495 | stb.extend(self.InteractiveTB.get_exception_only(etype, |
|
1497 | 1496 | value)) |
|
1498 | 1497 | else: |
|
1499 | 1498 | stb = self.InteractiveTB.structured_traceback(etype, |
|
1500 | 1499 | value, tb, tb_offset=tb_offset) |
|
1501 | 1500 | # FIXME: the pdb calling should be done by us, not by |
|
1502 | 1501 | # the code computing the traceback. |
|
1503 | 1502 | if self.InteractiveTB.call_pdb: |
|
1504 | 1503 | # pdb mucks up readline, fix it back |
|
1505 | 1504 | self.set_readline_completer() |
|
1506 | 1505 | |
|
1507 | 1506 | # Actually show the traceback |
|
1508 | 1507 | self._showtraceback(etype, value, stb) |
|
1509 | 1508 | |
|
1510 | 1509 | except KeyboardInterrupt: |
|
1511 | 1510 | self.write_err("\nKeyboardInterrupt\n") |
|
1512 | 1511 | |
|
1513 | 1512 | def _showtraceback(self, etype, evalue, stb): |
|
1514 | 1513 | """Actually show a traceback. |
|
1515 | 1514 | |
|
1516 | 1515 | Subclasses may override this method to put the traceback on a different |
|
1517 | 1516 | place, like a side channel. |
|
1518 | 1517 | """ |
|
1519 | 1518 | print >> io.Term.cout, self.InteractiveTB.stb2text(stb) |
|
1520 | 1519 | |
|
1521 | 1520 | def showsyntaxerror(self, filename=None): |
|
1522 | 1521 | """Display the syntax error that just occurred. |
|
1523 | 1522 | |
|
1524 | 1523 | This doesn't display a stack trace because there isn't one. |
|
1525 | 1524 | |
|
1526 | 1525 | If a filename is given, it is stuffed in the exception instead |
|
1527 | 1526 | of what was there before (because Python's parser always uses |
|
1528 | 1527 | "<string>" when reading from a string). |
|
1529 | 1528 | """ |
|
1530 | 1529 | etype, value, last_traceback = sys.exc_info() |
|
1531 | 1530 | |
|
1532 | 1531 | # See note about these variables in showtraceback() above |
|
1533 | 1532 | sys.last_type = etype |
|
1534 | 1533 | sys.last_value = value |
|
1535 | 1534 | sys.last_traceback = last_traceback |
|
1536 | 1535 | |
|
1537 | 1536 | if filename and etype is SyntaxError: |
|
1538 | 1537 | # Work hard to stuff the correct filename in the exception |
|
1539 | 1538 | try: |
|
1540 | 1539 | msg, (dummy_filename, lineno, offset, line) = value |
|
1541 | 1540 | except: |
|
1542 | 1541 | # Not the format we expect; leave it alone |
|
1543 | 1542 | pass |
|
1544 | 1543 | else: |
|
1545 | 1544 | # Stuff in the right filename |
|
1546 | 1545 | try: |
|
1547 | 1546 | # Assume SyntaxError is a class exception |
|
1548 | 1547 | value = SyntaxError(msg, (filename, lineno, offset, line)) |
|
1549 | 1548 | except: |
|
1550 | 1549 | # If that failed, assume SyntaxError is a string |
|
1551 | 1550 | value = msg, (filename, lineno, offset, line) |
|
1552 | 1551 | stb = self.SyntaxTB.structured_traceback(etype, value, []) |
|
1553 | 1552 | self._showtraceback(etype, value, stb) |
|
1554 | 1553 | |
|
1555 | 1554 | #------------------------------------------------------------------------- |
|
1556 | 1555 | # Things related to readline |
|
1557 | 1556 | #------------------------------------------------------------------------- |
|
1558 | 1557 | |
|
1559 | 1558 | def init_readline(self): |
|
1560 | 1559 | """Command history completion/saving/reloading.""" |
|
1561 | 1560 | |
|
1562 | 1561 | if self.readline_use: |
|
1563 | 1562 | import IPython.utils.rlineimpl as readline |
|
1564 | 1563 | |
|
1565 | 1564 | self.rl_next_input = None |
|
1566 | 1565 | self.rl_do_indent = False |
|
1567 | 1566 | |
|
1568 | 1567 | if not self.readline_use or not readline.have_readline: |
|
1569 | 1568 | self.has_readline = False |
|
1570 | 1569 | self.readline = None |
|
1571 | 1570 | # Set a number of methods that depend on readline to be no-op |
|
1572 | 1571 | self.savehist = no_op |
|
1573 | 1572 | self.reloadhist = no_op |
|
1574 | 1573 | self.set_readline_completer = no_op |
|
1575 | 1574 | self.set_custom_completer = no_op |
|
1576 | 1575 | self.set_completer_frame = no_op |
|
1577 | 1576 | warn('Readline services not available or not loaded.') |
|
1578 | 1577 | else: |
|
1579 | 1578 | self.has_readline = True |
|
1580 | 1579 | self.readline = readline |
|
1581 | 1580 | sys.modules['readline'] = readline |
|
1582 | 1581 | |
|
1583 | 1582 | # Platform-specific configuration |
|
1584 | 1583 | if os.name == 'nt': |
|
1585 | 1584 | # FIXME - check with Frederick to see if we can harmonize |
|
1586 | 1585 | # naming conventions with pyreadline to avoid this |
|
1587 | 1586 | # platform-dependent check |
|
1588 | 1587 | self.readline_startup_hook = readline.set_pre_input_hook |
|
1589 | 1588 | else: |
|
1590 | 1589 | self.readline_startup_hook = readline.set_startup_hook |
|
1591 | 1590 | |
|
1592 | 1591 | # Load user's inputrc file (readline config) |
|
1593 | 1592 | # Or if libedit is used, load editrc. |
|
1594 | 1593 | inputrc_name = os.environ.get('INPUTRC') |
|
1595 | 1594 | if inputrc_name is None: |
|
1596 | 1595 | home_dir = get_home_dir() |
|
1597 | 1596 | if home_dir is not None: |
|
1598 | 1597 | inputrc_name = '.inputrc' |
|
1599 | 1598 | if readline.uses_libedit: |
|
1600 | 1599 | inputrc_name = '.editrc' |
|
1601 | 1600 | inputrc_name = os.path.join(home_dir, inputrc_name) |
|
1602 | 1601 | if os.path.isfile(inputrc_name): |
|
1603 | 1602 | try: |
|
1604 | 1603 | readline.read_init_file(inputrc_name) |
|
1605 | 1604 | except: |
|
1606 | 1605 | warn('Problems reading readline initialization file <%s>' |
|
1607 | 1606 | % inputrc_name) |
|
1608 | 1607 | |
|
1609 | 1608 | # Configure readline according to user's prefs |
|
1610 | 1609 | # This is only done if GNU readline is being used. If libedit |
|
1611 | 1610 | # is being used (as on Leopard) the readline config is |
|
1612 | 1611 | # not run as the syntax for libedit is different. |
|
1613 | 1612 | if not readline.uses_libedit: |
|
1614 | 1613 | for rlcommand in self.readline_parse_and_bind: |
|
1615 | 1614 | #print "loading rl:",rlcommand # dbg |
|
1616 | 1615 | readline.parse_and_bind(rlcommand) |
|
1617 | 1616 | |
|
1618 | 1617 | # Remove some chars from the delimiters list. If we encounter |
|
1619 | 1618 | # unicode chars, discard them. |
|
1620 | 1619 | delims = readline.get_completer_delims().encode("ascii", "ignore") |
|
1621 | 1620 | delims = delims.translate(string._idmap, |
|
1622 | 1621 | self.readline_remove_delims) |
|
1623 | 1622 | delims = delims.replace(ESC_MAGIC, '') |
|
1624 | 1623 | readline.set_completer_delims(delims) |
|
1625 | 1624 | # otherwise we end up with a monster history after a while: |
|
1626 | 1625 | readline.set_history_length(1000) |
|
1627 | 1626 | try: |
|
1628 | 1627 | #print '*** Reading readline history' # dbg |
|
1629 | 1628 | readline.read_history_file(self.histfile) |
|
1630 | 1629 | except IOError: |
|
1631 | 1630 | pass # It doesn't exist yet. |
|
1632 | 1631 | |
|
1633 | 1632 | # If we have readline, we want our history saved upon ipython |
|
1634 | 1633 | # exiting. |
|
1635 | 1634 | atexit.register(self.savehist) |
|
1636 | 1635 | |
|
1637 | 1636 | # Configure auto-indent for all platforms |
|
1638 | 1637 | self.set_autoindent(self.autoindent) |
|
1639 | 1638 | |
|
1640 | 1639 | def set_next_input(self, s): |
|
1641 | 1640 | """ Sets the 'default' input string for the next command line. |
|
1642 | 1641 | |
|
1643 | 1642 | Requires readline. |
|
1644 | 1643 | |
|
1645 | 1644 | Example: |
|
1646 | 1645 | |
|
1647 | 1646 | [D:\ipython]|1> _ip.set_next_input("Hello World") |

1648 | 1647 | [D:\ipython]|2> Hello World_ # cursor is here |
|
1649 | 1648 | """ |
|
1650 | 1649 | |
|
1651 | 1650 | self.rl_next_input = s |
|
1652 | 1651 | |
|
1653 | 1652 | # Maybe move this to the terminal subclass? |
|
1654 | 1653 | def pre_readline(self): |
|
1655 | 1654 | """readline hook to be used at the start of each line. |
|
1656 | 1655 | |
|
1657 | 1656 | Currently it handles auto-indent only.""" |
|
1658 | 1657 | |
|
1659 | 1658 | if self.rl_do_indent: |
|
1660 | 1659 | self.readline.insert_text(self._indent_current_str()) |
|
1661 | 1660 | if self.rl_next_input is not None: |
|
1662 | 1661 | self.readline.insert_text(self.rl_next_input) |
|
1663 | 1662 | self.rl_next_input = None |
|
1664 | 1663 | |
|
1665 | 1664 | def _indent_current_str(self): |
|
1666 | 1665 | """return the current level of indentation as a string""" |
|
1667 | 1666 | return self.indent_current_nsp * ' ' |
|
1668 | 1667 | |
|
1669 | 1668 | #------------------------------------------------------------------------- |
|
1670 | 1669 | # Things related to text completion |
|
1671 | 1670 | #------------------------------------------------------------------------- |
|
1672 | 1671 | |
|
1673 | 1672 | def init_completer(self): |
|
1674 | 1673 | """Initialize the completion machinery. |
|
1675 | 1674 | |
|
1676 | 1675 | This creates completion machinery that can be used by client code, |
|
1677 | 1676 | either interactively in-process (typically triggered by the readline |
|
1678 | 1677 | library), programmatically (such as in test suites) or out-of-process |
|
1679 | 1678 | (typically over the network by remote frontends). |
|
1680 | 1679 | """ |
|
1681 | 1680 | from IPython.core.completer import IPCompleter |
|
1682 | 1681 | from IPython.core.completerlib import (module_completer, |
|
1683 | 1682 | magic_run_completer, cd_completer) |
|
1684 | 1683 | |
|
1685 | 1684 | self.Completer = IPCompleter(self, |
|
1686 | 1685 | self.user_ns, |
|
1687 | 1686 | self.user_global_ns, |
|
1688 | 1687 | self.readline_omit__names, |
|
1689 | 1688 | self.alias_manager.alias_table, |
|
1690 | 1689 | self.has_readline) |
|
1691 | 1690 | |
|
1692 | 1691 | # Add custom completers to the basic ones built into IPCompleter |
|
1693 | 1692 | sdisp = self.strdispatchers.get('complete_command', StrDispatch()) |
|
1694 | 1693 | self.strdispatchers['complete_command'] = sdisp |
|
1695 | 1694 | self.Completer.custom_completers = sdisp |
|
1696 | 1695 | |
|
1697 | 1696 | self.set_hook('complete_command', module_completer, str_key = 'import') |
|
1698 | 1697 | self.set_hook('complete_command', module_completer, str_key = 'from') |
|
1699 | 1698 | self.set_hook('complete_command', magic_run_completer, str_key = '%run') |
|
1700 | 1699 | self.set_hook('complete_command', cd_completer, str_key = '%cd') |
|
1701 | 1700 | |
|
1702 | 1701 | # Only configure readline if we truly are using readline. IPython can |
|
1703 | 1702 | # do tab-completion over the network, in GUIs, etc, where readline |
|
1704 | 1703 | # itself may be absent |
|
1705 | 1704 | if self.has_readline: |
|
1706 | 1705 | self.set_readline_completer() |
|
1707 | 1706 | |
|
1708 | 1707 | def complete(self, text, line=None, cursor_pos=None): |
|
1709 | 1708 | """Return the completed text and a list of completions. |
|
1710 | 1709 | |
|
1711 | 1710 | Parameters |
|
1712 | 1711 | ---------- |
|
1713 | 1712 | |
|
1714 | 1713 | text : string |
|
1715 | 1714 | A string of text to be completed on. It can be given as empty, and |

1716 | 1715 | a line/position pair supplied instead. In this case, the |
|
1717 | 1716 | completer itself will split the line like readline does. |
|
1718 | 1717 | |
|
1719 | 1718 | line : string, optional |
|
1720 | 1719 | The complete line that text is part of. |
|
1721 | 1720 | |
|
1722 | 1721 | cursor_pos : int, optional |
|
1723 | 1722 | The position of the cursor on the input line. |
|
1724 | 1723 | |
|
1725 | 1724 | Returns |
|
1726 | 1725 | ------- |
|
1727 | 1726 | text : string |
|
1728 | 1727 | The actual text that was completed. |
|
1729 | 1728 | |
|
1730 | 1729 | matches : list |
|
1731 | 1730 | A sorted list with all possible completions. |
|
1732 | 1731 | |
|
1733 | 1732 | The optional arguments allow the completion to take more context into |
|
1734 | 1733 | account, and are part of the low-level completion API. |
|
1735 | 1734 | |
|
1736 | 1735 | This is a wrapper around the completion mechanism, similar to what |
|
1737 | 1736 | readline does at the command line when the TAB key is hit. By |
|
1738 | 1737 | exposing it as a method, it can be used by other non-readline |
|
1739 | 1738 | environments (such as GUIs) for text completion. |
|
1740 | 1739 | |
|
1741 | 1740 | Simple usage example: |
|
1742 | 1741 | |
|
1743 | 1742 | In [1]: x = 'hello' |
|
1744 | 1743 | |
|
1745 | 1744 | In [2]: _ip.complete('x.l') |
|
1746 | 1745 | Out[2]: ('x.l', ['x.ljust', 'x.lower', 'x.lstrip']) |
|
1747 | 1746 | """ |
|
1748 | 1747 | |
|
1749 | 1748 | # Inject names into __builtin__ so we can complete on the added names. |
|
1750 | 1749 | with self.builtin_trap: |
|
1751 | 1750 | return self.Completer.complete(text, line, cursor_pos) |
|
1752 | 1751 | |
|
1753 | 1752 | def set_custom_completer(self, completer, pos=0): |
|
1754 | 1753 | """Adds a new custom completer function. |
|
1755 | 1754 | |
|
1756 | 1755 | The position argument (defaults to 0) is the index in the completers |
|
1757 | 1756 | list where you want the completer to be inserted.""" |
|
1758 | 1757 | |
|
1759 | newcomp = new.instancemethod(completer,self.Completer, | |

1760 | self.Completer.__class__) | |

1758 | newcomp = types.MethodType(completer, self.Completer) | |
|
1761 | 1759 | self.Completer.matchers.insert(pos,newcomp) |
|
1762 | 1760 | |
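
As a sketch only: a custom matcher gets bound to the Completer, so it receives the Completer instance as `self`. That the remaining argument is the text being completed is an assumption here, mirroring the built-in matchers, not something this diff states:

    def color_matcher(self, text):
        # Assumed signature: offer a fixed vocabulary for the current token.
        words = ['red', 'green', 'blue']
        return [w for w in words if w.startswith(text)]

    ip.set_custom_completer(color_matcher)    # inserted at position 0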
|
1763 | 1761 | def set_readline_completer(self): |
|
1764 | 1762 | """Reset readline's completer to be our own.""" |
|
1765 | 1763 | self.readline.set_completer(self.Completer.rlcomplete) |
|
1766 | 1764 | |
|
1767 | 1765 | def set_completer_frame(self, frame=None): |
|
1768 | 1766 | """Set the frame of the completer.""" |
|
1769 | 1767 | if frame: |
|
1770 | 1768 | self.Completer.namespace = frame.f_locals |
|
1771 | 1769 | self.Completer.global_namespace = frame.f_globals |
|
1772 | 1770 | else: |
|
1773 | 1771 | self.Completer.namespace = self.user_ns |
|
1774 | 1772 | self.Completer.global_namespace = self.user_global_ns |
|
1775 | 1773 | |
|
1776 | 1774 | #------------------------------------------------------------------------- |
|
1777 | 1775 | # Things related to magics |
|
1778 | 1776 | #------------------------------------------------------------------------- |
|
1779 | 1777 | |
|
1780 | 1778 | def init_magics(self): |
|
1781 | 1779 | # FIXME: Move the color initialization to the DisplayHook, which |
|
1782 | 1780 | # should be split into a prompt manager and displayhook. We probably |
|
1783 | 1781 | # even need a centralize colors management object. |
|
1784 | 1782 | self.magic_colors(self.colors) |
|
1785 | 1783 | # History was moved to a separate module |
|
1786 | 1784 | from . import history |
|
1787 | 1785 | history.init_ipython(self) |
|
1788 | 1786 | |
|
1789 | 1787 | def magic(self,arg_s): |
|
1790 | 1788 | """Call a magic function by name. |
|
1791 | 1789 | |
|
1792 | 1790 | Input: a string containing the name of the magic function to call and |
|
1793 | 1791 | any additional arguments to be passed to the magic. |
|
1794 | 1792 | |
|
1795 | 1793 | magic('name -opt foo bar') is equivalent to typing at the ipython |
|
1796 | 1794 | prompt: |
|
1797 | 1795 | |
|
1798 | 1796 | In[1]: %name -opt foo bar |
|
1799 | 1797 | |
|
1800 | 1798 | To call a magic without arguments, simply use magic('name'). |
|
1801 | 1799 | |
|
1802 | 1800 | This provides a proper Python function to call IPython's magics in any |
|
1803 | 1801 | valid Python code you can type at the interpreter, including loops and |
|
1804 | 1802 | compound statements. |
|
1805 | 1803 | """ |
|
1806 | 1804 | args = arg_s.split(' ',1) |
|
1807 | 1805 | magic_name = args[0] |
|
1808 | 1806 | magic_name = magic_name.lstrip(prefilter.ESC_MAGIC) |
|
1809 | 1807 | |
|
1810 | 1808 | try: |
|
1811 | 1809 | magic_args = args[1] |
|
1812 | 1810 | except IndexError: |
|
1813 | 1811 | magic_args = '' |
|
1814 | 1812 | fn = getattr(self,'magic_'+magic_name,None) |
|
1815 | 1813 | if fn is None: |
|
1816 | 1814 | error("Magic function `%s` not found." % magic_name) |
|
1817 | 1815 | else: |
|
1818 | 1816 | magic_args = self.var_expand(magic_args,1) |
|
1819 | 1817 | with nested(self.builtin_trap,): |
|
1820 | 1818 | result = fn(magic_args) |
|
1821 | 1819 | return result |
|
1822 | 1820 | |
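
Because magic() is an ordinary method, magics can be driven from plain Python code. A sketch, with `ip` as the shell and the file names purely illustrative:

    for fname in ['setup.py', 'benchmark.py']:
        ip.magic('run %s' % fname)     # same as typing: %run setup.py

    ip.magic('cd ..')                  # works for any registered magic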
|
1823 | 1821 | def define_magic(self, magicname, func): |
|
1824 | 1822 | """Expose own function as magic function for ipython |
|
1825 | 1823 | |
|
1826 | 1824 | def foo_impl(self,parameter_s=''): |
|
1827 | 1825 | 'My very own magic!. (Use docstrings, IPython reads them).' |
|
1828 | 1826 | print 'Magic function. Passed parameter is between < >:' |
|
1829 | 1827 | print '<%s>' % parameter_s |
|
1830 | 1828 | print 'The self object is:',self |
|
1831 | ||
|
1829 | newcomp = types.MethodType(completer, self.Completer) | |
|
1832 | 1830 | self.define_magic('foo',foo_impl) |
|
1833 | 1831 | """ |
|
1834 | 1832 | |
|
1835 | import new | |
|
1836 | im = new.instancemethod(func,self, self.__class__) | |
|
1833 | im = types.MethodType(func, self) | |
|
1837 | 1834 | old = getattr(self, "magic_" + magicname, None) |
|
1838 | 1835 | setattr(self, "magic_" + magicname, im) |
|
1839 | 1836 | return old |
|
1840 | 1837 | |
|
1841 | 1838 | #------------------------------------------------------------------------- |
|
1842 | 1839 | # Things related to macros |
|
1843 | 1840 | #------------------------------------------------------------------------- |
|
1844 | 1841 | |
|
1845 | 1842 | def define_macro(self, name, themacro): |
|
1846 | 1843 | """Define a new macro |
|
1847 | 1844 | |
|
1848 | 1845 | Parameters |
|
1849 | 1846 | ---------- |
|
1850 | 1847 | name : str |
|
1851 | 1848 | The name of the macro. |
|
1852 | 1849 | themacro : str or Macro |
|
1853 | 1850 | The action to do upon invoking the macro. If a string, a new |
|
1854 | 1851 | Macro object is created by passing the string to it. |
|
1855 | 1852 | """ |
|
1856 | 1853 | |
|
1857 | 1854 | from IPython.core import macro |
|
1858 | 1855 | |
|
1859 | 1856 | if isinstance(themacro, basestring): |
|
1860 | 1857 | themacro = macro.Macro(themacro) |
|
1861 | 1858 | if not isinstance(themacro, macro.Macro): |
|
1862 | 1859 | raise ValueError('A macro must be a string or a Macro instance.') |
|
1863 | 1860 | self.user_ns[name] = themacro |
|
1864 | 1861 | |
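
A short sketch of both accepted forms, with `ip` as the shell instance:

    # From a plain string: a Macro object is created automatically.
    ip.define_macro('imports', 'import os\nimport sys\n')

    # Or pass a ready-made Macro instance explicitly.
    from IPython.core import macro
    ip.define_macro('greet', macro.Macro("print 'hello'\n"))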
|
1865 | 1862 | #------------------------------------------------------------------------- |
|
1866 | 1863 | # Things related to the running of system commands |
|
1867 | 1864 | #------------------------------------------------------------------------- |
|
1868 | 1865 | |
|
1869 | 1866 | def system(self, cmd): |
|
1870 | 1867 | """Call the given cmd in a subprocess. |
|
1871 | 1868 | |
|
1872 | 1869 | Parameters |
|
1873 | 1870 | ---------- |
|
1874 | 1871 | cmd : str |
|
1875 | 1872 | Command to execute (can not end in '&', as background processes are |

1876 | 1873 | not supported). |
|
1877 | 1874 | """ |
|
1878 | 1875 | # We do not support backgrounding processes because we either use |
|
1879 | 1876 | # pexpect or pipes to read from. Users can always just call |
|
1880 | 1877 | # os.system() if they really want a background process. |
|
1881 | 1878 | if cmd.endswith('&'): |
|
1882 | 1879 | raise OSError("Background processes not supported.") |
|
1883 | 1880 | |
|
1884 | 1881 | return system(self.var_expand(cmd, depth=2)) |
|
1885 | 1882 | |
|
1886 | 1883 | def getoutput(self, cmd, split=True): |
|
1887 | 1884 | """Get output (possibly including stderr) from a subprocess. |
|
1888 | 1885 | |
|
1889 | 1886 | Parameters |
|
1890 | 1887 | ---------- |
|
1891 | 1888 | cmd : str |
|
1892 | 1889 | Command to execute (can not end in '&', as background processes are |
|
1893 | 1890 | not supported. |
|
1894 | 1891 | split : bool, optional |
|
1895 | 1892 | |
|
1896 | 1893 | If True, split the output into an IPython SList. Otherwise, an |
|
1897 | 1894 | IPython LSString is returned. These are objects similar to normal |
|
1898 | 1895 | lists and strings, with a few convenience attributes for easier |
|
1899 | 1896 | manipulation of line-based output. You can use '?' on them for |
|
1900 | 1897 | details. |
|
1901 | 1898 | """ |
|
1902 | 1899 | if cmd.endswith('&'): |
|
1903 | 1900 | raise OSError("Background processes not supported.") |
|
1904 | 1901 | out = getoutput(self.var_expand(cmd, depth=2)) |
|
1905 | 1902 | if split: |
|
1906 | 1903 | out = SList(out.splitlines()) |
|
1907 | 1904 | else: |
|
1908 | 1905 | out = LSString(out) |
|
1909 | 1906 | return out |
|
1910 | 1907 | |
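
A usage sketch for the two subprocess helpers above; `ip` is the shell and the commands are illustrative:

    ip.system('mkdir -p /tmp/demo')              # run for side effects only

    files = ip.getoutput('ls /tmp')              # SList: list-like, one line per item
    print len(files), files[:3]

    text = ip.getoutput('ls /tmp', split=False)  # LSString: a single string-like value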
|
1911 | 1908 | #------------------------------------------------------------------------- |
|
1912 | 1909 | # Things related to aliases |
|
1913 | 1910 | #------------------------------------------------------------------------- |
|
1914 | 1911 | |
|
1915 | 1912 | def init_alias(self): |
|
1916 | 1913 | self.alias_manager = AliasManager(shell=self, config=self.config) |
|
1917 | 1914 | self.ns_table['alias'] = self.alias_manager.alias_table, |
|
1918 | 1915 | |
|
1919 | 1916 | #------------------------------------------------------------------------- |
|
1920 | 1917 | # Things related to extensions and plugins |
|
1921 | 1918 | #------------------------------------------------------------------------- |
|
1922 | 1919 | |
|
1923 | 1920 | def init_extension_manager(self): |
|
1924 | 1921 | self.extension_manager = ExtensionManager(shell=self, config=self.config) |
|
1925 | 1922 | |
|
1926 | 1923 | def init_plugin_manager(self): |
|
1927 | 1924 | self.plugin_manager = PluginManager(config=self.config) |
|
1928 | 1925 | |
|
1929 | 1926 | #------------------------------------------------------------------------- |
|
1930 | 1927 | # Things related to payloads |
|
1931 | 1928 | #------------------------------------------------------------------------- |
|
1932 | 1929 | |
|
1933 | 1930 | def init_payload(self): |
|
1934 | 1931 | self.payload_manager = PayloadManager(config=self.config) |
|
1935 | 1932 | |
|
1936 | 1933 | #------------------------------------------------------------------------- |
|
1937 | 1934 | # Things related to the prefilter |
|
1938 | 1935 | #------------------------------------------------------------------------- |
|
1939 | 1936 | |
|
1940 | 1937 | def init_prefilter(self): |
|
1941 | 1938 | self.prefilter_manager = PrefilterManager(shell=self, config=self.config) |
|
1942 | 1939 | # Ultimately this will be refactored in the new interpreter code, but |
|
1943 | 1940 | # for now, we should expose the main prefilter method (there's legacy |
|
1944 | 1941 | # code out there that may rely on this). |
|
1945 | 1942 | self.prefilter = self.prefilter_manager.prefilter_lines |
|
1946 | 1943 | |
|
1947 | 1944 | |
|
1948 | 1945 | def auto_rewrite_input(self, cmd): |
|
1949 | 1946 | """Print to the screen the rewritten form of the user's command. |
|
1950 | 1947 | |
|
1951 | 1948 | This shows visual feedback by rewriting input lines that cause |
|
1952 | 1949 | automatic calling to kick in, like:: |
|
1953 | 1950 | |
|
1954 | 1951 | /f x |
|
1955 | 1952 | |
|
1956 | 1953 | into:: |
|
1957 | 1954 | |
|
1958 | 1955 | ------> f(x) |
|
1959 | 1956 | |
|
1960 | 1957 | after the user's input prompt. This helps the user understand that the |
|
1961 | 1958 | input line was transformed automatically by IPython. |
|
1962 | 1959 | """ |
|
1963 | 1960 | rw = self.displayhook.prompt1.auto_rewrite() + cmd |
|
1964 | 1961 | |
|
1965 | 1962 | try: |
|
1966 | 1963 | # plain ascii works better w/ pyreadline, on some machines, so |
|
1967 | 1964 | # we use it and only print uncolored rewrite if we have unicode |
|
1968 | 1965 | rw = str(rw) |
|
1969 | 1966 | print >> IPython.utils.io.Term.cout, rw |
|
1970 | 1967 | except UnicodeEncodeError: |
|
1971 | 1968 | print "------> " + cmd |
|
1972 | 1969 | |
|
1973 | 1970 | #------------------------------------------------------------------------- |
|
1974 | 1971 | # Things related to extracting values/expressions from kernel and user_ns |
|
1975 | 1972 | #------------------------------------------------------------------------- |
|
1976 | 1973 | |
|
1977 | 1974 | def _simple_error(self): |
|
1978 | 1975 | etype, value = sys.exc_info()[:2] |
|
1979 | 1976 | return u'[ERROR] {e.__name__}: {v}'.format(e=etype, v=value) |
|
1980 | 1977 | |
|
1981 | 1978 | def user_variables(self, names): |
|
1982 | 1979 | """Get a list of variable names from the user's namespace. |
|
1983 | 1980 | |
|
1984 | 1981 | Parameters |
|
1985 | 1982 | ---------- |
|
1986 | 1983 | names : list of strings |
|
1987 | 1984 | A list of names of variables to be read from the user namespace. |
|
1988 | 1985 | |
|
1989 | 1986 | Returns |
|
1990 | 1987 | ------- |
|
1991 | 1988 | A dict, keyed by the input names and with the repr() of each value. |
|
1992 | 1989 | """ |
|
1993 | 1990 | out = {} |
|
1994 | 1991 | user_ns = self.user_ns |
|
1995 | 1992 | for varname in names: |
|
1996 | 1993 | try: |
|
1997 | 1994 | value = repr(user_ns[varname]) |
|
1998 | 1995 | except: |
|
1999 | 1996 | value = self._simple_error() |
|
2000 | 1997 | out[varname] = value |
|
2001 | 1998 | return out |
|
2002 | 1999 | |
|
2003 | 2000 | def user_expressions(self, expressions): |
|
2004 | 2001 | """Evaluate a dict of expressions in the user's namespace. |
|
2005 | 2002 | |
|
2006 | 2003 | Parameters |
|
2007 | 2004 | ---------- |
|
2008 | 2005 | expressions : dict |
|
2009 | 2006 | A dict with string keys and string values. The expression values |
|
2010 | 2007 | should be valid Python expressions, each of which will be evaluated |
|
2011 | 2008 | in the user namespace. |
|
2012 | 2009 | |
|
2013 | 2010 | Returns |
|
2014 | 2011 | ------- |
|
2015 | 2012 | A dict, keyed like the input expressions dict, with the repr() of each |
|
2016 | 2013 | value. |
|
2017 | 2014 | """ |
|
2018 | 2015 | out = {} |
|
2019 | 2016 | user_ns = self.user_ns |
|
2020 | 2017 | global_ns = self.user_global_ns |
|
2021 | 2018 | for key, expr in expressions.iteritems(): |
|
2022 | 2019 | try: |
|
2023 | 2020 | value = repr(eval(expr, global_ns, user_ns)) |
|
2024 | 2021 | except: |
|
2025 | 2022 | value = self._simple_error() |
|
2026 | 2023 | out[key] = value |
|
2027 | 2024 | return out |
|
2028 | 2025 | |
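
A sketch of the expression-dict protocol described above, with `ip` as the shell:

    ip.ex('width = 3; height = 4')        # put something in the user namespace
    reprs = ip.user_expressions({'area': 'width * height',
                                 'missing': 'no_such_name'})
    # reprs['area'] == '12'
    # reprs['missing'] starts with '[ERROR] NameError: ...'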
|
2029 | 2026 | #------------------------------------------------------------------------- |
|
2030 | 2027 | # Things related to the running of code |
|
2031 | 2028 | #------------------------------------------------------------------------- |
|
2032 | 2029 | |
|
2033 | 2030 | def ex(self, cmd): |
|
2034 | 2031 | """Execute a normal python statement in user namespace.""" |
|
2035 | 2032 | with nested(self.builtin_trap,): |
|
2036 | 2033 | exec cmd in self.user_global_ns, self.user_ns |
|
2037 | 2034 | |
|
2038 | 2035 | def ev(self, expr): |
|
2039 | 2036 | """Evaluate python expression expr in user namespace. |
|
2040 | 2037 | |
|
2041 | 2038 | Returns the result of evaluation |
|
2042 | 2039 | """ |
|
2043 | 2040 | with nested(self.builtin_trap,): |
|
2044 | 2041 | return eval(expr, self.user_global_ns, self.user_ns) |
|
2045 | 2042 | |
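
A minimal sketch contrasting the two helpers, with `ip` as the shell:

    ip.ex('counter = 0')              # statement: executed, nothing returned
    ip.ex('counter += 10')
    total = ip.ev('counter * 2')      # expression: evaluated, result returned
    assert total == 20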
|
2046 | 2043 | def safe_execfile(self, fname, *where, **kw): |
|
2047 | 2044 | """A safe version of the builtin execfile(). |
|
2048 | 2045 | |
|
2049 | 2046 | This version will never throw an exception, but instead print |
|
2050 | 2047 | helpful error messages to the screen. This only works on pure |
|
2051 | 2048 | Python files with the .py extension. |
|
2052 | 2049 | |
|
2053 | 2050 | Parameters |
|
2054 | 2051 | ---------- |
|
2055 | 2052 | fname : string |
|
2056 | 2053 | The name of the file to be executed. |
|
2057 | 2054 | where : tuple |
|
2058 | 2055 | One or two namespaces, passed to execfile() as (globals,locals). |
|
2059 | 2056 | If only one is given, it is passed as both. |
|
2060 | 2057 | exit_ignore : bool (False) |
|
2061 | 2058 | If True, then silence SystemExit for non-zero status (it is always |
|
2062 | 2059 | silenced for zero status, as it is so common). |
|
2063 | 2060 | """ |
|
2064 | 2061 | kw.setdefault('exit_ignore', False) |
|
2065 | 2062 | |
|
2066 | 2063 | fname = os.path.abspath(os.path.expanduser(fname)) |
|
2067 | 2064 | |
|
2068 | 2065 | # Make sure we have a .py file |
|
2069 | 2066 | if not fname.endswith('.py'): |
|
2070 | 2067 | warn('File must end with .py to be run using execfile: <%s>' % fname) |
|
2071 | 2068 | |
|
2072 | 2069 | # Make sure we can open the file |
|
2073 | 2070 | try: |
|
2074 | 2071 | with open(fname) as thefile: |
|
2075 | 2072 | pass |
|
2076 | 2073 | except: |
|
2077 | 2074 | warn('Could not open file <%s> for safe execution.' % fname) |
|
2078 | 2075 | return |
|
2079 | 2076 | |
|
2080 | 2077 | # Find things also in current directory. This is needed to mimic the |
|
2081 | 2078 | # behavior of running a script from the system command line, where |
|
2082 | 2079 | # Python inserts the script's directory into sys.path |
|
2083 | 2080 | dname = os.path.dirname(fname) |
|
2084 | 2081 | |
|
2085 | 2082 | with prepended_to_syspath(dname): |
|
2086 | 2083 | try: |
|
2087 | 2084 | execfile(fname,*where) |
|
2088 | 2085 | except SystemExit, status: |
|
2089 | 2086 | # If the call was made with 0 or None exit status (sys.exit(0) |
|
2090 | 2087 | # or sys.exit() ), don't bother showing a traceback, as both of |
|
2091 | 2088 | # these are considered normal by the OS: |
|
2092 | 2089 | # > python -c'import sys;sys.exit(0)'; echo $? |
|
2093 | 2090 | # 0 |
|
2094 | 2091 | # > python -c'import sys;sys.exit()'; echo $? |
|
2095 | 2092 | # 0 |
|
2096 | 2093 | # For other exit status, we show the exception unless |
|
2097 | 2094 | # explicitly silenced, but only in short form. |
|
2098 | 2095 | if status.code not in (0, None) and not kw['exit_ignore']: |
|
2099 | 2096 | self.showtraceback(exception_only=True) |
|
2100 | 2097 | except: |
|
2101 | 2098 | self.showtraceback() |
|
2102 | 2099 | |
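
A usage sketch; `ip` is the shell and 'script.py' a hypothetical path:

    # Run with the user namespace as both globals and locals.
    ip.safe_execfile('script.py', ip.user_ns)

    # Also silence non-zero SystemExit (e.g. scripts calling sys.exit(2)).
    ip.safe_execfile('script.py', ip.user_ns, exit_ignore=True)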
|
2103 | 2100 | def safe_execfile_ipy(self, fname): |
|
2104 | 2101 | """Like safe_execfile, but for .ipy files with IPython syntax. |
|
2105 | 2102 | |
|
2106 | 2103 | Parameters |
|
2107 | 2104 | ---------- |
|
2108 | 2105 | fname : str |
|
2109 | 2106 | The name of the file to execute. The filename must have a |
|
2110 | 2107 | .ipy extension. |
|
2111 | 2108 | """ |
|
2112 | 2109 | fname = os.path.abspath(os.path.expanduser(fname)) |
|
2113 | 2110 | |
|
2114 | 2111 | # Make sure we have a .ipy file |

2115 | 2112 | if not fname.endswith('.ipy'): |

2116 | 2113 | warn('File must end with .ipy to be run using safe_execfile_ipy: <%s>' % fname) |
|
2117 | 2114 | |
|
2118 | 2115 | # Make sure we can open the file |
|
2119 | 2116 | try: |
|
2120 | 2117 | with open(fname) as thefile: |
|
2121 | 2118 | pass |
|
2122 | 2119 | except: |
|
2123 | 2120 | warn('Could not open file <%s> for safe execution.' % fname) |
|
2124 | 2121 | return |
|
2125 | 2122 | |
|
2126 | 2123 | # Find things also in current directory. This is needed to mimic the |
|
2127 | 2124 | # behavior of running a script from the system command line, where |
|
2128 | 2125 | # Python inserts the script's directory into sys.path |
|
2129 | 2126 | dname = os.path.dirname(fname) |
|
2130 | 2127 | |
|
2131 | 2128 | with prepended_to_syspath(dname): |
|
2132 | 2129 | try: |
|
2133 | 2130 | with open(fname) as thefile: |
|
2134 | 2131 | script = thefile.read() |
|
2135 | 2132 | # self.runlines currently captures all exceptions |
|
2136 | 2133 | # raised in user code. It would be nice if there were |
|
2137 | 2134 | # versions of runlines, execfile that did raise, so |
|
2138 | 2135 | # we could catch the errors. |
|
2139 | 2136 | self.runlines(script, clean=True) |
|
2140 | 2137 | except: |
|
2141 | 2138 | self.showtraceback() |
|
2142 | 2139 | warn('Unknown failure executing file: <%s>' % fname) |
|
2143 | 2140 | |
|
2144 | 2141 | def run_cell(self, cell): |
|
2145 | 2142 | """Run the contents of an entire multiline 'cell' of code. |
|
2146 | 2143 | |
|
2147 | 2144 | The cell is split into separate blocks which can be executed |
|
2148 | 2145 | individually. Then, based on how many blocks there are, they are |
|
2149 | 2146 | executed as follows: |
|
2150 | 2147 | |
|
2151 | 2148 | - A single block: 'single' mode. |
|
2152 | 2149 | |
|
2153 | 2150 | If there's more than one block, it depends: |
|
2154 | 2151 | |
|
2155 | 2152 | - if the last one is no more than two lines long, run all but the last |
|
2156 | 2153 | in 'exec' mode and the very last one in 'single' mode. This makes it |
|
2157 | 2154 | easy to type simple expressions at the end to see computed values. |

2158 | 2155 | - otherwise (last one is also multiline), run all in 'exec' mode |
|
2159 | 2156 | |
|
2160 | 2157 | When code is executed in 'single' mode, :func:`sys.displayhook` fires, |
|
2161 | 2158 | results are displayed and output prompts are computed. In 'exec' mode, |
|
2162 | 2159 | no results are displayed unless :func:`print` is called explicitly; |
|
2163 | 2160 | this mode is more akin to running a script. |
|
2164 | 2161 | |
|
2165 | 2162 | Parameters |
|
2166 | 2163 | ---------- |
|
2167 | 2164 | cell : str |
|
2168 | 2165 | A single or multiline string. |
|
2169 | 2166 | """ |
|
2170 | 2167 | ################################################################# |
|
2171 | 2168 | # FIXME |
|
2172 | 2169 | # ===== |
|
2173 | 2170 | # This execution logic should stop calling runlines altogether, and |
|
2174 | 2171 | # instead we should do what runlines does, in a controlled manner, here |
|
2175 | 2172 | # (runlines mutates lots of state as it goes calling sub-methods that |
|
2176 | 2173 | # also mutate state). Basically we should: |
|
2177 | 2174 | # - apply dynamic transforms for single-line input (the ones that |
|
2178 | 2175 | # split_blocks won't apply since they need context). |
|
2179 | 2176 | # - increment the global execution counter (we need to pull that out |
|
2180 | 2177 | # from outputcache's control; outputcache should instead read it from |
|
2181 | 2178 | # the main object). |
|
2182 | 2179 | # - do any logging of input |
|
2183 | 2180 | # - update histories (raw/translated) |
|
2184 | 2181 | # - then, call plain runsource (for single blocks, so displayhook is |
|
2185 | 2182 | # triggered) or runcode (for multiline blocks in exec mode). |
|
2186 | 2183 | # |
|
2187 | 2184 | # Once this is done, we'll be able to stop using runlines and we'll |
|
2188 | 2185 | # also have a much cleaner separation of logging, input history and |
|
2189 | 2186 | # output cache management. |
|
2190 | 2187 | ################################################################# |
|
2191 | 2188 | |
|
2192 | 2189 | # We need to break up the input into executable blocks that can be run |
|
2193 | 2190 | # in 'single' mode, to provide comfortable user behavior. |
|
2194 | 2191 | blocks = self.input_splitter.split_blocks(cell) |
|
2195 | 2192 | |
|
2196 | 2193 | if not blocks: |
|
2197 | 2194 | return |
|
2198 | 2195 | |
|
2199 | 2196 | # Single-block input should behave like an interactive prompt |
|
2200 | 2197 | if len(blocks) == 1: |
|
2201 | 2198 | self.runlines(blocks[0]) |
|
2202 | 2199 | return |
|
2203 | 2200 | |
|
2204 | 2201 | # In multi-block input, if the last block is a simple (one-two lines) |
|
2205 | 2202 | # expression, run it in single mode so it produces output. Otherwise |
|
2206 | 2203 | # just feed the whole thing to runcode. |
|
2207 | 2204 | # This seems like a reasonable usability design. |
|
2208 | 2205 | last = blocks[-1] |
|
2209 | 2206 | |
|
2210 | 2207 | # Note: below, whenever we call runcode, we must sync history |
|
2211 | 2208 | # ourselves, because runcode is NOT meant to manage history at all. |
|
2212 | 2209 | if len(last.splitlines()) < 2: |
|
2213 | 2210 | # Get the main body to run as a cell |
|
2214 | 2211 | body = ''.join(blocks[:-1]) |
|
2215 | 2212 | self.input_hist.append(body) |
|
2216 | 2213 | self.input_hist_raw.append(body) |
|
2217 | 2214 | retcode = self.runcode(body, post_execute=False) |
|
2218 | 2215 | if retcode==0: |
|
2219 | 2216 | # And the last expression via runlines so it produces output |
|
2220 | 2217 | self.runlines(last) |
|
2221 | 2218 | else: |
|
2222 | 2219 | # Run the whole cell as one entity |
|
2223 | 2220 | self.input_hist.append(cell) |
|
2224 | 2221 | self.input_hist_raw.append(cell) |
|
2225 | 2222 | self.runcode(cell) |
|
2226 | 2223 | |
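
The block handling described above can be seen with a cell whose last block is a one-line expression; a sketch with `ip` as the shell:

    cell = ("import math\n"
            "\n"
            "def circle_area(r):\n"
            "    return math.pi * r ** 2\n"
            "\n"
            "circle_area(2)\n")

    # Everything but the final one-liner runs in 'exec' mode; the last
    # block runs in 'single' mode, so its value is displayed as output.
    ip.run_cell(cell)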
|
2227 | 2224 | def runlines(self, lines, clean=False): |
|
2228 | 2225 | """Run a string of one or more lines of source. |
|
2229 | 2226 | |
|
2230 | 2227 | This method is capable of running a string containing multiple source |
|
2231 | 2228 | lines, as if they had been entered at the IPython prompt. Since it |
|
2232 | 2229 | exposes IPython's processing machinery, the given strings can contain |
|
2233 | 2230 | magic calls (%magic), special shell access (!cmd), etc. |
|
2234 | 2231 | """ |
|
2235 | 2232 | |
|
2236 | 2233 | if isinstance(lines, (list, tuple)): |
|
2237 | 2234 | lines = '\n'.join(lines) |
|
2238 | 2235 | |
|
2239 | 2236 | if clean: |
|
2240 | 2237 | lines = self._cleanup_ipy_script(lines) |
|
2241 | 2238 | |
|
2242 | 2239 | # We must start with a clean buffer, in case this is run from an |
|
2243 | 2240 | # interactive IPython session (via a magic, for example). |
|
2244 | 2241 | self.resetbuffer() |
|
2245 | 2242 | lines = lines.splitlines() |
|
2246 | 2243 | more = 0 |
|
2247 | 2244 | with nested(self.builtin_trap, self.display_trap): |
|
2248 | 2245 | for line in lines: |
|
2249 | 2246 | # skip blank lines so we don't mess up the prompt counter, but |
|
2250 | 2247 | # do NOT skip even a blank line if we are in a code block (more |
|
2251 | 2248 | # is true) |
|
2252 | 2249 | |
|
2253 | 2250 | if line or more: |
|
2254 | 2251 | # push to raw history, so hist line numbers stay in sync |
|
2255 | 2252 | self.input_hist_raw.append(line + '\n') |
|
2256 | 2253 | prefiltered = self.prefilter_manager.prefilter_lines(line, |
|
2257 | 2254 | more) |
|
2258 | 2255 | more = self.push_line(prefiltered) |
|
2259 | 2256 | # IPython's runsource returns None if there was an error |
|
2260 | 2257 | # compiling the code. This allows us to stop processing |
|
2261 | 2258 | # right away, so the user gets the error message at the |
|
2262 | 2259 | # right place. |
|
2263 | 2260 | if more is None: |
|
2264 | 2261 | break |
|
2265 | 2262 | else: |
|
2266 | 2263 | self.input_hist_raw.append("\n") |
|
2267 | 2264 | # final newline in case the input didn't have it, so that the code |
|
2268 | 2265 | # actually does get executed |
|
2269 | 2266 | if more: |
|
2270 | 2267 | self.push_line('\n') |
|
2271 | 2268 | |
|
2272 | 2269 | def runsource(self, source, filename='<input>', symbol='single'): |
|
2273 | 2270 | """Compile and run some source in the interpreter. |
|
2274 | 2271 | |
|
2275 | 2272 | Arguments are as for compile_command(). |
|
2276 | 2273 | |
|
2277 | 2274 | One of several things can happen: |
|
2278 | 2275 | |
|
2279 | 2276 | 1) The input is incorrect; compile_command() raised an |
|
2280 | 2277 | exception (SyntaxError or OverflowError). A syntax traceback |
|
2281 | 2278 | will be printed by calling the showsyntaxerror() method. |
|
2282 | 2279 | |
|
2283 | 2280 | 2) The input is incomplete, and more input is required; |
|
2284 | 2281 | compile_command() returned None. Nothing happens. |
|
2285 | 2282 | |
|
2286 | 2283 | 3) The input is complete; compile_command() returned a code |
|
2287 | 2284 | object. The code is executed by calling self.runcode() (which |
|
2288 | 2285 | also handles run-time exceptions, except for SystemExit). |
|
2289 | 2286 | |
|
2290 | 2287 | The return value is: |
|
2291 | 2288 | |
|
2292 | 2289 | - True in case 2 |
|
2293 | 2290 | |
|
2294 | 2291 | - False in the other cases, unless an exception is raised, where |
|
2295 | 2292 | None is returned instead. This can be used by external callers to |
|
2296 | 2293 | know whether to continue feeding input or not. |
|
2297 | 2294 | |
|
2298 | 2295 | The return value can be used to decide whether to use sys.ps1 or |
|
2299 | 2296 | sys.ps2 to prompt the next line.""" |
|
2300 | 2297 | |
|
2301 | 2298 | # We need to ensure that the source is unicode from here on. |
|
2302 | 2299 | if type(source)==str: |
|
2303 | 2300 | source = source.decode(self.stdin_encoding) |
|
2304 | 2301 | |
|
2305 | 2302 | # if the source code has leading blanks, add 'if 1:\n' to it |
|
2306 | 2303 | # this allows execution of indented pasted code. It is tempting |
|
2307 | 2304 | # to add '\n' at the end of source to run commands like ' a=1' |
|
2308 | 2305 | # directly, but this fails for more complicated scenarios |
|
2309 | 2306 | |
|
2310 | 2307 | if source[:1] in [' ', '\t']: |
|
2311 | 2308 | source = u'if 1:\n%s' % source |
|
2312 | 2309 | |
|
2313 | 2310 | try: |
|
2314 | 2311 | code = self.compile(source,filename,symbol) |
|
2315 | 2312 | except (OverflowError, SyntaxError, ValueError, TypeError, MemoryError): |
|
2316 | 2313 | # Case 1 |
|
2317 | 2314 | self.showsyntaxerror(filename) |
|
2318 | 2315 | return None |
|
2319 | 2316 | |
|
2320 | 2317 | if code is None: |
|
2321 | 2318 | # Case 2 |
|
2322 | 2319 | return True |
|
2323 | 2320 | |
|
2324 | 2321 | # Case 3 |
|
2325 | 2322 | # We store the code object so that threaded shells and |
|
2326 | 2323 | # custom exception handlers can access all this info if needed. |
|
2327 | 2324 | # The source corresponding to this can be obtained from the |
|
2328 | 2325 | # buffer attribute as '\n'.join(self.buffer). |
|
2329 | 2326 | self.code_to_run = code |
|
2330 | 2327 | # now actually execute the code object |
|
2331 | 2328 | if self.runcode(code) == 0: |
|
2332 | 2329 | return False |
|
2333 | 2330 | else: |
|
2334 | 2331 | return None |
|
2335 | 2332 | |
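
The three-way return value is what drives a console-style loop; a compressed sketch with `ip` as the shell:

    buf = []
    for line in ['for i in range(3):', '    print i', '']:
        buf.append(line)
        more = ip.runsource('\n'.join(buf))
        if more is True:
            continue          # case 2: statement incomplete, keep feeding lines
        if more is None:
            pass              # compile or runtime problem: a traceback was shown
        buf = []              # False or None: start a fresh statement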
|
2336 | 2333 | def runcode(self, code_obj, post_execute=True): |
|
2337 | 2334 | """Execute a code object. |
|
2338 | 2335 | |
|
2339 | 2336 | When an exception occurs, self.showtraceback() is called to display a |
|
2340 | 2337 | traceback. |
|
2341 | 2338 | |
|
2342 | 2339 | Return value: a flag indicating whether the code to be run completed |
|
2343 | 2340 | successfully: |
|
2344 | 2341 | |
|
2345 | 2342 | - 0: successful execution. |
|
2346 | 2343 | - 1: an error occurred. |
|
2347 | 2344 | """ |
|
2348 | 2345 | |
|
2349 | 2346 | # Set our own excepthook in case the user code tries to call it |
|
2350 | 2347 | # directly, so that the IPython crash handler doesn't get triggered |
|
2351 | 2348 | old_excepthook,sys.excepthook = sys.excepthook, self.excepthook |
|
2352 | 2349 | |
|
2353 | 2350 | # we save the original sys.excepthook in the instance, in case config |
|
2354 | 2351 | # code (such as magics) needs access to it. |
|
2355 | 2352 | self.sys_excepthook = old_excepthook |
|
2356 | 2353 | outflag = 1 # happens in more places, so it's easier as default |
|
2357 | 2354 | try: |
|
2358 | 2355 | try: |
|
2359 | 2356 | self.hooks.pre_runcode_hook() |
|
2360 | 2357 | #rprint('Running code') # dbg |
|
2361 | 2358 | exec code_obj in self.user_global_ns, self.user_ns |
|
2362 | 2359 | finally: |
|
2363 | 2360 | # Reset our crash handler in place |
|
2364 | 2361 | sys.excepthook = old_excepthook |
|
2365 | 2362 | except SystemExit: |
|
2366 | 2363 | self.resetbuffer() |
|
2367 | 2364 | self.showtraceback(exception_only=True) |
|
2368 | 2365 | warn("To exit: use any of 'exit', 'quit', %Exit or Ctrl-D.", level=1) |
|
2369 | 2366 | except self.custom_exceptions: |
|
2370 | 2367 | etype,value,tb = sys.exc_info() |
|
2371 | 2368 | self.CustomTB(etype,value,tb) |
|
2372 | 2369 | except: |
|
2373 | 2370 | self.showtraceback() |
|
2374 | 2371 | else: |
|
2375 | 2372 | outflag = 0 |
|
2376 | 2373 | if softspace(sys.stdout, 0): |
|
2377 | 2374 | print |
|
2378 | 2375 | |
|
2379 | 2376 | # Execute any registered post-execution functions. Here, any errors |
|
2380 | 2377 | # are reported only minimally and just on the terminal, because the |
|
2381 | 2378 | # main exception channel may be occupied with a user traceback. |
|
2382 | 2379 | # FIXME: we need to think this mechanism a little more carefully. |
|
2383 | 2380 | if post_execute: |
|
2384 | 2381 | for func in self._post_execute: |
|
2385 | 2382 | try: |
|
2386 | 2383 | func() |
|
2387 | 2384 | except: |
|
2388 | 2385 | head = '[ ERROR ] Evaluating post_execute function: %s' % \ |
|
2389 | 2386 | func |
|
2390 | 2387 | print >> io.Term.cout, head |
|
2391 | 2388 | print >> io.Term.cout, self._simple_error() |
|
2392 | 2389 | print >> io.Term.cout, 'Removing from post_execute' |
|
2393 | 2390 | self._post_execute.remove(func) |
|
2394 | 2391 | |
|
2395 | 2392 | # Flush out code object which has been run (and source) |
|
2396 | 2393 | self.code_to_run = None |
|
2397 | 2394 | return outflag |
|
2398 | 2395 | |
|
2399 | 2396 | def push_line(self, line): |
|
2400 | 2397 | """Push a line to the interpreter. |
|
2401 | 2398 | |
|
2402 | 2399 | The line should not have a trailing newline; it may have |
|
2403 | 2400 | internal newlines. The line is appended to a buffer and the |
|
2404 | 2401 | interpreter's runsource() method is called with the |
|
2405 | 2402 | concatenated contents of the buffer as source. If this |
|
2406 | 2403 | indicates that the command was executed or invalid, the buffer |
|
2407 | 2404 | is reset; otherwise, the command is incomplete, and the buffer |
|
2408 | 2405 | is left as it was after the line was appended. The return |
|
2409 | 2406 | value is 1 if more input is required, 0 if the line was dealt |
|
2410 | 2407 | with in some way (this is the same as runsource()). |
|
2411 | 2408 | """ |
|
2412 | 2409 | |
|
2413 | 2410 | # autoindent management should be done here, and not in the |
|
2414 | 2411 | # interactive loop, since that one is only seen by keyboard input. We |
|
2415 | 2412 | # need this done correctly even for code run via runlines (which uses |
|
2416 | 2413 | # push). |
|
2417 | 2414 | |
|
2418 | 2415 | #print 'push line: <%s>' % line # dbg |
|
2419 | 2416 | for subline in line.splitlines(): |
|
2420 | 2417 | self._autoindent_update(subline) |
|
2421 | 2418 | self.buffer.append(line) |
|
2422 | 2419 | more = self.runsource('\n'.join(self.buffer), self.filename) |
|
2423 | 2420 | if not more: |
|
2424 | 2421 | self.resetbuffer() |
|
2425 | 2422 | return more |
|
2426 | 2423 | |
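
Callers use push_line() like the loop below: keep feeding lines while it returns a true value (more input required), and let it reset the buffer once a complete or invalid statement has been seen. A hedged sketch, assuming `shell` is an InteractiveShell with the semantics documented above:

    def feed_lines(shell, lines):
        """Drive push_line(): keep pushing while more input is required."""
        more = False
        for line in lines:
            print ('... ' if more else '>>> ') + line   # echo, terminal-style
            more = shell.push_line(line)
        return more

    # e.g. feed_lines(shell, ['for i in range(2):', '    print i', ''])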
|
2427 | 2424 | def resetbuffer(self): |
|
2428 | 2425 | """Reset the input buffer.""" |
|
2429 | 2426 | self.buffer[:] = [] |
|
2430 | 2427 | |
|
2431 | 2428 | def _is_secondary_block_start(self, s): |
|
2432 | 2429 | if not s.endswith(':'): |
|
2433 | 2430 | return False |
|
2434 | 2431 | if (s.startswith('elif') or |
|
2435 | 2432 | s.startswith('else') or |
|
2436 | 2433 | s.startswith('except') or |
|
2437 | 2434 | s.startswith('finally')): |
|
2438 | 2435 | return True |
|
2439 | 2436 | |
|
2440 | 2437 | def _cleanup_ipy_script(self, script): |
|
2441 | 2438 | """Make a script safe for self.runlines() |
|
2442 | 2439 | |
|
2443 | 2440 | Currently, IPython is lines based, with blocks being detected by |
|
2444 | 2441 | empty lines. This is a problem for block based scripts that may |
|
2445 | 2442 | not have empty lines after blocks. This method adds those empty |
|
2446 | 2443 | lines to make scripts safe for running in the current line based |
|
2447 | 2444 | IPython. |
|
2448 | 2445 | """ |
|
2449 | 2446 | res = [] |
|
2450 | 2447 | lines = script.splitlines() |
|
2451 | 2448 | level = 0 |
|
2452 | 2449 | |
|
2453 | 2450 | for l in lines: |
|
2454 | 2451 | lstripped = l.lstrip() |
|
2455 | 2452 | stripped = l.strip() |
|
2456 | 2453 | if not stripped: |
|
2457 | 2454 | continue |
|
2458 | 2455 | newlevel = len(l) - len(lstripped) |
|
2459 | 2456 | if level > 0 and newlevel == 0 and \ |
|
2460 | 2457 | not self._is_secondary_block_start(stripped): |
|
2461 | 2458 | # add empty line |
|
2462 | 2459 | res.append('') |
|
2463 | 2460 | res.append(l) |
|
2464 | 2461 | level = newlevel |
|
2465 | 2462 | |
|
2466 | 2463 | return '\n'.join(res) + '\n' |
|
2467 | 2464 | |
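
The effect of _cleanup_ipy_script() is easiest to see on a short block-based script: a blank line is inserted whenever indentation drops back to column zero without opening a secondary clause (elif/else/except/finally). Expected output shown as comments, assuming `ip` is an InteractiveShell instance:

    script = ("for i in range(2):\n"
              "    print i\n"
              "print 'done'\n")
    # ip._cleanup_ipy_script(script) returns the same code with a blank line
    # inserted before "print 'done'", so the line-based runlines() machinery
    # sees where the for-block ends:
    #
    #   for i in range(2):
    #       print i
    #
    #   print 'done'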
|
2468 | 2465 | def _autoindent_update(self,line): |
|
2469 | 2466 | """Keep track of the indent level.""" |
|
2470 | 2467 | |
|
2471 | 2468 | #debugx('line') |
|
2472 | 2469 | #debugx('self.indent_current_nsp') |
|
2473 | 2470 | if self.autoindent: |
|
2474 | 2471 | if line: |
|
2475 | 2472 | inisp = num_ini_spaces(line) |
|
2476 | 2473 | if inisp < self.indent_current_nsp: |
|
2477 | 2474 | self.indent_current_nsp = inisp |
|
2478 | 2475 | |
|
2479 | 2476 | if line[-1] == ':': |
|
2480 | 2477 | self.indent_current_nsp += 4 |
|
2481 | 2478 | elif dedent_re.match(line): |
|
2482 | 2479 | self.indent_current_nsp -= 4 |
|
2483 | 2480 | else: |
|
2484 | 2481 | self.indent_current_nsp = 0 |
|
2485 | 2482 | |
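
The autoindent bookkeeping above amounts to: clamp the level to the new line's own indentation when that is smaller, then add 4 after a trailing colon and subtract 4 after a dedenting statement. A standalone sketch; the dedent regex here is a simplified stand-in for the module-level dedent_re, and tabs are handled only approximately:

    import re

    # Simplified stand-in for the module-level dedent_re (an assumption here).
    _dedent = re.compile(r'^\s*(return|pass|break|continue|raise)\b')

    def next_indent(line, current_nsp):
        """Indent (in spaces) to suggest for the line *after* `line`."""
        if not line:
            return 0
        inisp = len(line) - len(line.lstrip())   # num_ini_spaces, roughly
        if inisp < current_nsp:
            current_nsp = inisp
        if line[-1] == ':':
            current_nsp += 4
        elif _dedent.match(line):
            current_nsp -= 4
        return current_nsp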
|
2486 | 2483 | #------------------------------------------------------------------------- |
|
2487 | 2484 | # Things related to GUI support and pylab |
|
2488 | 2485 | #------------------------------------------------------------------------- |
|
2489 | 2486 | |
|
2490 | 2487 | def enable_pylab(self, gui=None): |
|
2491 | 2488 | raise NotImplementedError('Implement enable_pylab in a subclass') |
|
2492 | 2489 | |
|
2493 | 2490 | #------------------------------------------------------------------------- |
|
2494 | 2491 | # Utilities |
|
2495 | 2492 | #------------------------------------------------------------------------- |
|
2496 | 2493 | |
|
2497 | 2494 | def var_expand(self,cmd,depth=0): |
|
2498 | 2495 | """Expand python variables in a string. |
|
2499 | 2496 | |
|
2500 | 2497 | The depth argument indicates how many frames above the caller should |
|
2501 | 2498 | be walked to look for the local namespace in which to expand variables. |
|
2502 | 2499 | |
|
2503 | 2500 | The global namespace for expansion is always the user's interactive |
|
2504 | 2501 | namespace. |
|
2505 | 2502 | """ |
|
2506 | 2503 | |
|
2507 | 2504 | return str(ItplNS(cmd, |
|
2508 | 2505 | self.user_ns, # globals |
|
2509 | 2506 | # Skip our own frame in searching for locals: |
|
2510 | 2507 | sys._getframe(depth+1).f_locals # locals |
|
2511 | 2508 | )) |
|
2512 | 2509 | |
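
var_expand() delegates the actual interpolation to ItplNS from IPython.external.Itpl, which resolves $name and ${expr} references against the caller's locals and the user namespace. As a rough stand-in (plain string.Template, so only simple $name lookups, not full expression evaluation):

    import sys
    from string import Template

    def var_expand_sketch(cmd, user_ns, depth=0):
        """Approximate var_expand(): $name resolves against the caller's
        locals first, then against the user namespace."""
        ns = dict(user_ns)
        ns.update(sys._getframe(depth + 1).f_locals)   # locals take precedence
        return Template(cmd).safe_substitute(ns)

    # With x = 'hello' in the calling scope:
    #   var_expand_sketch('echo $x', {})  ->  'echo hello'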
|
2513 | 2510 | def mktempfile(self,data=None): |
|
2514 | 2511 | """Make a new tempfile and return its filename. |
|
2515 | 2512 | |
|
2516 | 2513 | This makes a call to tempfile.mktemp, but it registers the created |
|
2517 | 2514 | filename internally so ipython cleans it up at exit time. |
|
2518 | 2515 | |
|
2519 | 2516 | Optional inputs: |
|
2520 | 2517 | |
|
2521 | 2518 | - data(None): if data is given, it gets written out to the temp file |
|
2522 | 2519 | immediately, and the file is closed again.""" |
|
2523 | 2520 | |
|
2524 | 2521 | filename = tempfile.mktemp('.py','ipython_edit_') |
|
2525 | 2522 | self.tempfiles.append(filename) |
|
2526 | 2523 | |
|
2527 | 2524 | if data: |
|
2528 | 2525 | tmp_file = open(filename,'w') |
|
2529 | 2526 | tmp_file.write(data) |
|
2530 | 2527 | tmp_file.close() |
|
2531 | 2528 | return filename |
|
2532 | 2529 | |
|
2533 | 2530 | # TODO: This should be removed when Term is refactored. |
|
2534 | 2531 | def write(self,data): |
|
2535 | 2532 | """Write a string to the default output""" |
|
2536 | 2533 | io.Term.cout.write(data) |
|
2537 | 2534 | |
|
2538 | 2535 | # TODO: This should be removed when Term is refactored. |
|
2539 | 2536 | def write_err(self,data): |
|
2540 | 2537 | """Write a string to the default error output""" |
|
2541 | 2538 | io.Term.cerr.write(data) |
|
2542 | 2539 | |
|
2543 | 2540 | def ask_yes_no(self,prompt,default=True): |
|
2544 | 2541 | if self.quiet: |
|
2545 | 2542 | return True |
|
2546 | 2543 | return ask_yes_no(prompt,default) |
|
2547 | 2544 | |
|
2548 | 2545 | def show_usage(self): |
|
2549 | 2546 | """Show a usage message""" |
|
2550 | 2547 | page.page(IPython.core.usage.interactive_usage) |
|
2551 | 2548 | |
|
2552 | 2549 | #------------------------------------------------------------------------- |
|
2553 | 2550 | # Things related to IPython exiting |
|
2554 | 2551 | #------------------------------------------------------------------------- |
|
2555 | 2552 | def atexit_operations(self): |
|
2556 | 2553 | """This will be executed at the time of exit. |
|
2557 | 2554 | |
|
2558 | 2555 | Cleanup operations and saving of persistent data that is done |
|
2559 | 2556 | unconditionally by IPython should be performed here. |
|
2560 | 2557 | |
|
2561 | 2558 | For things that may depend on startup flags or platform specifics (such |
|
2562 | 2559 | as having readline or not), register a separate atexit function in the |
|
2563 | 2560 | code that has the appropriate information, rather than trying to |
|
2564 | 2561 | clutter this method. |
|
2565 | 2562 | """ |
|
2566 | 2563 | # Cleanup all tempfiles left around |
|
2567 | 2564 | for tfile in self.tempfiles: |
|
2568 | 2565 | try: |
|
2569 | 2566 | os.unlink(tfile) |
|
2570 | 2567 | except OSError: |
|
2571 | 2568 | pass |
|
2572 | 2569 | |
|
2573 | 2570 | # Clear all user namespaces to release all references cleanly. |
|
2574 | 2571 | self.reset() |
|
2575 | 2572 | |
|
2576 | 2573 | # Run user hooks |
|
2577 | 2574 | self.hooks.shutdown_hook() |
|
2578 | 2575 | |
|
2579 | 2576 | def cleanup(self): |
|
2580 | 2577 | self.restore_sys_module_state() |
|
2581 | 2578 | |
|
2582 | 2579 | |
|
2583 | 2580 | class InteractiveShellABC(object): |
|
2584 | 2581 | """An abstract base class for InteractiveShell.""" |
|
2585 | 2582 | __metaclass__ = abc.ABCMeta |
|
2586 | 2583 | |
|
2587 | 2584 | InteractiveShellABC.register(InteractiveShell) |
@@ -1,1014 +1,1014 b'' | |||
|
1 | 1 | #!/usr/bin/env python |
|
2 | 2 | # encoding: utf-8 |
|
3 | 3 | """ |
|
4 | 4 | Prefiltering components. |
|
5 | 5 | |
|
6 | 6 | Prefilters transform user input before it is exec'd by Python. These |
|
7 | 7 | transforms are used to implement additional syntax such as !ls and %magic. |
|
8 | 8 | |
|
9 | 9 | Authors: |
|
10 | 10 | |
|
11 | 11 | * Brian Granger |
|
12 | 12 | * Fernando Perez |
|
13 | 13 | * Dan Milstein |
|
14 | 14 | * Ville Vainio |
|
15 | 15 | """ |
|
16 | 16 | |
|
17 | 17 | #----------------------------------------------------------------------------- |
|
18 | 18 | # Copyright (C) 2008-2009 The IPython Development Team |
|
19 | 19 | # |
|
20 | 20 | # Distributed under the terms of the BSD License. The full license is in |
|
21 | 21 | # the file COPYING, distributed as part of this software. |
|
22 | 22 | #----------------------------------------------------------------------------- |
|
23 | 23 | |
|
24 | 24 | #----------------------------------------------------------------------------- |
|
25 | 25 | # Imports |
|
26 | 26 | #----------------------------------------------------------------------------- |
|
27 | 27 | |
|
28 | 28 | import __builtin__ |
|
29 | 29 | import codeop |
|
30 | 30 | import re |
|
31 | 31 | |
|
32 | 32 | from IPython.core.alias import AliasManager |
|
33 | 33 | from IPython.core.autocall import IPyAutocall |
|
34 | 34 | from IPython.config.configurable import Configurable |
|
35 | 35 | from IPython.core.splitinput import split_user_input |
|
36 | 36 | from IPython.core import page |
|
37 | 37 | |
|
38 | 38 | from IPython.utils.traitlets import List, Int, Any, Str, CBool, Bool, Instance |
|
39 | 39 | import IPython.utils.io |
|
40 | 40 | from IPython.utils.text import make_quoted_expr |
|
41 | 41 | from IPython.utils.autoattr import auto_attr |
|
42 | 42 | |
|
43 | 43 | #----------------------------------------------------------------------------- |
|
44 | 44 | # Global utilities, errors and constants |
|
45 | 45 | #----------------------------------------------------------------------------- |
|
46 | 46 | |
|
47 | 47 | # Warning, these cannot be changed unless various regular expressions |
|
48 | 48 | # are updated in a number of places. Not great, but at least we told you. |
|
49 | 49 | ESC_SHELL = '!' |
|
50 | 50 | ESC_SH_CAP = '!!' |
|
51 | 51 | ESC_HELP = '?' |
|
52 | 52 | ESC_MAGIC = '%' |
|
53 | 53 | ESC_QUOTE = ',' |
|
54 | 54 | ESC_QUOTE2 = ';' |
|
55 | 55 | ESC_PAREN = '/' |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | class PrefilterError(Exception): |
|
59 | 59 | pass |
|
60 | 60 | |
|
61 | 61 | |
|
62 | 62 | # RegExp to identify potential function names |
|
63 | 63 | re_fun_name = re.compile(r'[a-zA-Z_]([a-zA-Z0-9_.]*) *$') |
|
64 | 64 | |
|
65 | 65 | # RegExp to exclude strings with this start from autocalling. In |
|
66 | 66 | # particular, all binary operators should be excluded, so that if foo is |
|
67 | 67 | # callable, foo OP bar doesn't become foo(OP bar), which is invalid. The |
|
68 | 68 | # characters '!=()' don't need to be checked for, as the checkPythonChars |
|
69 | 69 | # routine explicitly does so, to catch direct calls and rebindings of |
|
70 | 70 | # existing names. |
|
71 | 71 | |
|
72 | 72 | # Warning: the '-' HAS TO BE AT THE END of the first group, otherwise |
|
73 | 73 | # it affects the rest of the group in square brackets. |
|
74 | 74 | re_exclude_auto = re.compile(r'^[,&^\|\*/\+-]' |
|
75 | 75 | r'|^is |^not |^in |^and |^or ') |
|
76 | 76 | |
|
77 | 77 | # try to catch also methods for stuff in lists/tuples/dicts: off |
|
78 | 78 | # (experimental). For this to work, the line_split regexp would need |
|
79 | 79 | # to be modified so it wouldn't break things at '['. That line is |
|
80 | 80 | # nasty enough that I shouldn't change it until I can test it _well_. |
|
81 | 81 | #self.re_fun_name = re.compile (r'[a-zA-Z_]([a-zA-Z0-9_.\[\]]*) ?$') |
|
82 | 82 | |
|
83 | 83 | |
|
84 | 84 | # Handler Check Utilities |
|
85 | 85 | def is_shadowed(identifier, ip): |
|
86 | 86 | """Is the given identifier defined in one of the namespaces which shadow |
|
87 | 87 | the alias and magic namespaces? Note that an identifier is different |
|
88 | 88 | than ifun, because it can not contain a '.' character.""" |
|
89 | 89 | # This is much safer than calling ofind, which can change state |
|
90 | 90 | return (identifier in ip.user_ns \ |
|
91 | 91 | or identifier in ip.internal_ns \ |
|
92 | 92 | or identifier in ip.ns_table['builtin']) |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | #----------------------------------------------------------------------------- |
|
96 | 96 | # The LineInfo class used throughout |
|
97 | 97 | #----------------------------------------------------------------------------- |
|
98 | 98 | |
|
99 | 99 | |
|
100 | 100 | class LineInfo(object): |
|
101 | 101 | """A single line of input and associated info. |
|
102 | 102 | |
|
103 | 103 | Includes the following as properties: |
|
104 | 104 | |
|
105 | 105 | line |
|
106 | 106 | The original, raw line |
|
107 | 107 | |
|
108 | 108 | continue_prompt |
|
109 | 109 | Is this line a continuation in a sequence of multiline input? |
|
110 | 110 | |
|
111 | 111 | pre |
|
112 | 112 | The initial esc character or whitespace. |
|
113 | 113 | |
|
114 | 114 | pre_char |
|
115 | 115 | The escape character(s) in pre or the empty string if there isn't one. |
|
116 | 116 | Note that '!!' is a possible value for pre_char. Otherwise it will |
|
117 | 117 | always be a single character. |
|
118 | 118 | |
|
119 | 119 | pre_whitespace |
|
120 | 120 | The leading whitespace from pre if it exists. If there is a pre_char, |
|
121 | 121 | this is just ''. |
|
122 | 122 | |
|
123 | 123 | ifun |
|
124 | 124 | The 'function part', which is basically the maximal initial sequence |
|
125 | 125 | of valid python identifiers and the '.' character. This is what is |
|
126 | 126 | checked for alias and magic transformations, used for auto-calling, |
|
127 | 127 | etc. |
|
128 | 128 | |
|
129 | 129 | the_rest |
|
130 | 130 | Everything else on the line. |
|
131 | 131 | """ |
|
132 | 132 | def __init__(self, line, continue_prompt): |
|
133 | 133 | self.line = line |
|
134 | 134 | self.continue_prompt = continue_prompt |
|
135 | 135 | self.pre, self.ifun, self.the_rest = split_user_input(line) |
|
136 | 136 | |
|
137 | 137 | self.pre_char = self.pre.strip() |
|
138 | 138 | if self.pre_char: |
|
139 | 139 | self.pre_whitespace = '' # No whitespace allowed before esc chars |
|
140 | 140 | else: |
|
141 | 141 | self.pre_whitespace = self.pre |
|
142 | 142 | |
|
143 | 143 | self._oinfo = None |
|
144 | 144 | |
|
145 | 145 | def ofind(self, ip): |
|
146 | 146 | """Do a full, attribute-walking lookup of the ifun in the various |
|
147 | 147 | namespaces for the given IPython InteractiveShell instance. |
|
148 | 148 | |
|
149 | 149 | Return a dict with keys: found,obj,ospace,ismagic |
|
150 | 150 | |
|
151 | 151 | Note: can cause state changes because of calling getattr, but should |
|
152 | 152 | only be run if autocall is on and if the line hasn't matched any |
|
153 | 153 | other, less dangerous handlers. |
|
154 | 154 | |
|
155 | 155 | Does cache the results of the call, so can be called multiple times |
|
156 | 156 | without worrying about *further* damaging state. |
|
157 | 157 | """ |
|
158 | 158 | if not self._oinfo: |
|
159 | 159 | # ip.shell._ofind is actually on the Magic class! |
|
160 | 160 | self._oinfo = ip.shell._ofind(self.ifun) |
|
161 | 161 | return self._oinfo |
|
162 | 162 | |
|
163 | 163 | def __str__(self): |
|
164 | 164 | return "Lineinfo [%s|%s|%s]" %(self.pre, self.ifun, self.the_rest) |
|
165 | 165 | |
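
Concretely, a LineInfo is just the raw line split into its escape/whitespace prefix, the leading name, and everything else. A quick way to inspect the fields; the printed values depend on split_user_input, so treat the example output as indicative:

    from IPython.core.prefilter import LineInfo

    for raw in ('!ls -l', '%timeit f(x)', 'result = f(x)'):
        info = LineInfo(raw, continue_prompt=False)
        print info    # e.g. Lineinfo [!|ls|-l] for the first line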
|
166 | 166 | |
|
167 | 167 | #----------------------------------------------------------------------------- |
|
168 | 168 | # Main Prefilter manager |
|
169 | 169 | #----------------------------------------------------------------------------- |
|
170 | 170 | |
|
171 | 171 | |
|
172 | 172 | class PrefilterManager(Configurable): |
|
173 | 173 | """Main prefilter component. |
|
174 | 174 | |
|
175 | 175 | The IPython prefilter is run on all user input before it is run. The |
|
176 | 176 | prefilter consumes lines of input and produces transformed lines of |
|
177 | 177 | input. |
|
178 | 178 | |
|
179 | 179 | The implementation consists of two phases: |
|
180 | 180 | |
|
181 | 181 | 1. Transformers |
|
182 | 182 | 2. Checkers and handlers |
|
183 | 183 | |
|
184 | 184 | Over time, we plan on deprecating the checkers and handlers and doing |
|
185 | 185 | everything in the transformers. |
|
186 | 186 | |
|
187 | 187 | The transformers are instances of :class:`PrefilterTransformer` and have |
|
188 | 188 | a single method :meth:`transform` that takes a line and returns a |
|
189 | 189 | transformed line. The transformation can be accomplished using any |
|
190 | 190 | tool, but our current ones use regular expressions for speed. We also |
|
191 | 191 | ship :mod:`pyparsing` in :mod:`IPython.external` for use in transformers. |
|
192 | 192 | |
|
193 | 193 | After all the transformers have been run, the line is fed to the checkers, |
|
194 | 194 | which are instances of :class:`PrefilterChecker`. The line is passed to |
|
195 | 195 | the :meth:`check` method, which either returns `None` or a |
|
196 | 196 | :class:`PrefilterHandler` instance. If `None` is returned, the other |
|
197 | 197 | checkers are tried. If a :class:`PrefilterHandler` instance is returned, |
|
198 | 198 | the line is passed to the :meth:`handle` method of the returned |
|
199 | 199 | handler and no further checkers are tried. |
|
200 | 200 | |
|
201 | 201 | Both transformers and checkers have a `priority` attribute, that determines |
|
202 | 202 | the order in which they are called. Smaller priorities are tried first. |
|
203 | 203 | |
|
204 | 204 | Both transformers and checkers also have an `enabled` attribute, which is |
|
205 | 205 | a boolean that determines if the instance is used. |
|
206 | 206 | |
|
207 | 207 | Users or developers can change the priority or enabled attribute of |
|
208 | 208 | transformers or checkers, but they must call the :meth:`sort_checkers` |
|
209 | 209 | or :meth:`sort_transformers` method after changing the priority. |
|
210 | 210 | """ |
|
211 | 211 | |
|
212 | 212 | multi_line_specials = CBool(True, config=True) |
|
213 | 213 | shell = Instance('IPython.core.interactiveshell.InteractiveShellABC') |
|
214 | 214 | |
|
215 | 215 | def __init__(self, shell=None, config=None): |
|
216 | 216 | super(PrefilterManager, self).__init__(shell=shell, config=config) |
|
217 | 217 | self.shell = shell |
|
218 | 218 | self.init_transformers() |
|
219 | 219 | self.init_handlers() |
|
220 | 220 | self.init_checkers() |
|
221 | 221 | |
|
222 | 222 | #------------------------------------------------------------------------- |
|
223 | 223 | # API for managing transformers |
|
224 | 224 | #------------------------------------------------------------------------- |
|
225 | 225 | |
|
226 | 226 | def init_transformers(self): |
|
227 | 227 | """Create the default transformers.""" |
|
228 | 228 | self._transformers = [] |
|
229 | 229 | for transformer_cls in _default_transformers: |
|
230 | 230 | transformer_cls( |
|
231 | 231 | shell=self.shell, prefilter_manager=self, config=self.config |
|
232 | 232 | ) |
|
233 | 233 | |
|
234 | 234 | def sort_transformers(self): |
|
235 | 235 | """Sort the transformers by priority. |
|
236 | 236 | |
|
237 | 237 | This must be called after the priority of a transformer is changed. |
|
238 | 238 | The :meth:`register_transformer` method calls this automatically. |
|
239 | 239 | """ |
|
240 | self._transformers.sort( |

240 | self._transformers.sort(key=lambda x: x.priority) |
|
241 | 241 | |
|
242 | 242 | @property |
|
243 | 243 | def transformers(self): |
|
244 | 244 | """Return a list of checkers, sorted by priority.""" |
|
245 | 245 | return self._transformers |
|
246 | 246 | |
|
247 | 247 | def register_transformer(self, transformer): |
|
248 | 248 | """Register a transformer instance.""" |
|
249 | 249 | if transformer not in self._transformers: |
|
250 | 250 | self._transformers.append(transformer) |
|
251 | 251 | self.sort_transformers() |
|
252 | 252 | |
|
253 | 253 | def unregister_transformer(self, transformer): |
|
254 | 254 | """Unregister a transformer instance.""" |
|
255 | 255 | if transformer in self._transformers: |
|
256 | 256 | self._transformers.remove(transformer) |
|
257 | 257 | |
|
258 | 258 | #------------------------------------------------------------------------- |
|
259 | 259 | # API for managing checkers |
|
260 | 260 | #------------------------------------------------------------------------- |
|
261 | 261 | |
|
262 | 262 | def init_checkers(self): |
|
263 | 263 | """Create the default checkers.""" |
|
264 | 264 | self._checkers = [] |
|
265 | 265 | for checker in _default_checkers: |
|
266 | 266 | checker( |
|
267 | 267 | shell=self.shell, prefilter_manager=self, config=self.config |
|
268 | 268 | ) |
|
269 | 269 | |
|
270 | 270 | def sort_checkers(self): |
|
271 | 271 | """Sort the checkers by priority. |
|
272 | 272 | |
|
273 | 273 | This must be called after the priority of a checker is changed. |
|
274 | 274 | The :meth:`register_checker` method calls this automatically. |
|
275 | 275 | """ |
|
276 | self._checkers.sort( |

276 | self._checkers.sort(key=lambda x: x.priority) |
|
277 | 277 | |
|
278 | 278 | @property |
|
279 | 279 | def checkers(self): |
|
280 | 280 | """Return a list of checkers, sorted by priority.""" |
|
281 | 281 | return self._checkers |
|
282 | 282 | |
|
283 | 283 | def register_checker(self, checker): |
|
284 | 284 | """Register a checker instance.""" |
|
285 | 285 | if checker not in self._checkers: |
|
286 | 286 | self._checkers.append(checker) |
|
287 | 287 | self.sort_checkers() |
|
288 | 288 | |
|
289 | 289 | def unregister_checker(self, checker): |
|
290 | 290 | """Unregister a checker instance.""" |
|
291 | 291 | if checker in self._checkers: |
|
292 | 292 | self._checkers.remove(checker) |
|
293 | 293 | |
|
294 | 294 | #------------------------------------------------------------------------- |
|
295 | 295 | # API for managing checkers |
|
296 | 296 | #------------------------------------------------------------------------- |
|
297 | 297 | |
|
298 | 298 | def init_handlers(self): |
|
299 | 299 | """Create the default handlers.""" |
|
300 | 300 | self._handlers = {} |
|
301 | 301 | self._esc_handlers = {} |
|
302 | 302 | for handler in _default_handlers: |
|
303 | 303 | handler( |
|
304 | 304 | shell=self.shell, prefilter_manager=self, config=self.config |
|
305 | 305 | ) |
|
306 | 306 | |
|
307 | 307 | @property |
|
308 | 308 | def handlers(self): |
|
309 | 309 | """Return a dict of all the handlers.""" |
|
310 | 310 | return self._handlers |
|
311 | 311 | |
|
312 | 312 | def register_handler(self, name, handler, esc_strings): |
|
313 | 313 | """Register a handler instance by name with esc_strings.""" |
|
314 | 314 | self._handlers[name] = handler |
|
315 | 315 | for esc_str in esc_strings: |
|
316 | 316 | self._esc_handlers[esc_str] = handler |
|
317 | 317 | |
|
318 | 318 | def unregister_handler(self, name, handler, esc_strings): |
|
319 | 319 | """Unregister a handler instance by name with esc_strings.""" |
|
320 | 320 | try: |
|
321 | 321 | del self._handlers[name] |
|
322 | 322 | except KeyError: |
|
323 | 323 | pass |
|
324 | 324 | for esc_str in esc_strings: |
|
325 | 325 | h = self._esc_handlers.get(esc_str) |
|
326 | 326 | if h is handler: |
|
327 | 327 | del self._esc_handlers[esc_str] |
|
328 | 328 | |
|
329 | 329 | def get_handler_by_name(self, name): |
|
330 | 330 | """Get a handler by its name.""" |
|
331 | 331 | return self._handlers.get(name) |
|
332 | 332 | |
|
333 | 333 | def get_handler_by_esc(self, esc_str): |
|
334 | 334 | """Get a handler by its escape string.""" |
|
335 | 335 | return self._esc_handlers.get(esc_str) |
|
336 | 336 | |
|
337 | 337 | #------------------------------------------------------------------------- |
|
338 | 338 | # Main prefiltering API |
|
339 | 339 | #------------------------------------------------------------------------- |
|
340 | 340 | |
|
341 | 341 | def prefilter_line_info(self, line_info): |
|
342 | 342 | """Prefilter a line that has been converted to a LineInfo object. |
|
343 | 343 | |
|
344 | 344 | This implements the checker/handler part of the prefilter pipe. |
|
345 | 345 | """ |
|
346 | 346 | # print "prefilter_line_info: ", line_info |
|
347 | 347 | handler = self.find_handler(line_info) |
|
348 | 348 | return handler.handle(line_info) |
|
349 | 349 | |
|
350 | 350 | def find_handler(self, line_info): |
|
351 | 351 | """Find a handler for the line_info by trying checkers.""" |
|
352 | 352 | for checker in self.checkers: |
|
353 | 353 | if checker.enabled: |
|
354 | 354 | handler = checker.check(line_info) |
|
355 | 355 | if handler: |
|
356 | 356 | return handler |
|
357 | 357 | return self.get_handler_by_name('normal') |
|
358 | 358 | |
|
359 | 359 | def transform_line(self, line, continue_prompt): |
|
360 | 360 | """Calls the enabled transformers in order of increasing priority.""" |
|
361 | 361 | for transformer in self.transformers: |
|
362 | 362 | if transformer.enabled: |
|
363 | 363 | line = transformer.transform(line, continue_prompt) |
|
364 | 364 | return line |
|
365 | 365 | |
|
366 | 366 | def prefilter_line(self, line, continue_prompt=False): |
|
367 | 367 | """Prefilter a single input line as text. |
|
368 | 368 | |
|
369 | 369 | This method prefilters a single line of text by calling the |
|
370 | 370 | transformers and then the checkers/handlers. |
|
371 | 371 | """ |
|
372 | 372 | |
|
373 | 373 | # print "prefilter_line: ", line, continue_prompt |
|
374 | 374 | # All handlers *must* return a value, even if it's blank (''). |
|
375 | 375 | |
|
376 | 376 | # Lines are NOT logged here. Handlers should process the line as |
|
377 | 377 | # needed, update the cache AND log it (so that the input cache array |
|
378 | 378 | # stays synced). |
|
379 | 379 | |
|
380 | 380 | # save the line away in case we crash, so the post-mortem handler can |
|
381 | 381 | # record it |
|
382 | 382 | self.shell._last_input_line = line |
|
383 | 383 | |
|
384 | 384 | if not line: |
|
385 | 385 | # Return immediately on purely empty lines, so that if the user |
|
386 | 386 | # previously typed some whitespace that started a continuation |
|
387 | 387 | # prompt, he can break out of that loop with just an empty line. |
|
388 | 388 | # This is how the default python prompt works. |
|
389 | 389 | |
|
390 | 390 | # Only return if the accumulated input buffer was just whitespace! |
|
391 | 391 | if ''.join(self.shell.buffer).isspace(): |
|
392 | 392 | self.shell.buffer[:] = [] |
|
393 | 393 | return '' |
|
394 | 394 | |
|
395 | 395 | # At this point, we invoke our transformers. |
|
396 | 396 | if not continue_prompt or (continue_prompt and self.multi_line_specials): |
|
397 | 397 | line = self.transform_line(line, continue_prompt) |
|
398 | 398 | |
|
399 | 399 | # Now we compute line_info for the checkers and handlers |
|
400 | 400 | line_info = LineInfo(line, continue_prompt) |
|
401 | 401 | |
|
402 | 402 | # the input history needs to track even empty lines |
|
403 | 403 | stripped = line.strip() |
|
404 | 404 | |
|
405 | 405 | normal_handler = self.get_handler_by_name('normal') |
|
406 | 406 | if not stripped: |
|
407 | 407 | if not continue_prompt: |
|
408 | 408 | self.shell.displayhook.prompt_count -= 1 |
|
409 | 409 | |
|
410 | 410 | return normal_handler.handle(line_info) |
|
411 | 411 | |
|
412 | 412 | # special handlers are only allowed for single line statements |
|
413 | 413 | if continue_prompt and not self.multi_line_specials: |
|
414 | 414 | return normal_handler.handle(line_info) |
|
415 | 415 | |
|
416 | 416 | prefiltered = self.prefilter_line_info(line_info) |
|
417 | 417 | # print "prefiltered line: %r" % prefiltered |
|
418 | 418 | return prefiltered |
|
419 | 419 | |
|
420 | 420 | def prefilter_lines(self, lines, continue_prompt=False): |
|
421 | 421 | """Prefilter multiple input lines of text. |
|
422 | 422 | |
|
423 | 423 | This is the main entry point for prefiltering multiple lines of |
|
424 | 424 | input. This simply calls :meth:`prefilter_line` for each line of |
|
425 | 425 | input. |
|
426 | 426 | |
|
427 | 427 | This covers cases where there are multiple lines in the user entry, |
|
428 | 428 | which is the case when the user goes back to a multiline history |
|
429 | 429 | entry and presses enter. |
|
430 | 430 | """ |
|
431 | 431 | llines = lines.rstrip('\n').split('\n') |
|
432 | 432 | # We can get multiple lines in one shot, where multiline input 'blends' |
|
433 | 433 | # into one line, in cases like recalling from the readline history |
|
434 | 434 | # buffer. We need to make sure that in such cases, we correctly |
|
435 | 435 | # communicate downstream which line is first and which are continuation |
|
436 | 436 | # ones. |
|
437 | 437 | if len(llines) > 1: |
|
438 | 438 | out = '\n'.join([self.prefilter_line(line, lnum>0) |
|
439 | 439 | for lnum, line in enumerate(llines) ]) |
|
440 | 440 | else: |
|
441 | 441 | out = self.prefilter_line(llines[0], continue_prompt) |
|
442 | 442 | |
|
443 | 443 | return out |
|
444 | 444 | |
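
Extending the pipeline described above takes only a small subclass: pick a priority, implement transform(), and instantiate it with the shell and the prefilter manager so that the base class registers it. A hedged sketch of a user-defined transformer (the '#dbg' marker it strips is invented for the example, and the attribute names in the commented instantiation are assumptions about the running shell):

    from IPython.utils.traitlets import Int
    from IPython.core.prefilter import PrefilterTransformer

    class DropDbgMarkerTransformer(PrefilterTransformer):
        """Toy transformer: strip a trailing '#dbg' marker before execution."""
        priority = Int(150, config=True)

        def transform(self, line, continue_prompt):
            stripped = line.rstrip()
            if stripped.endswith('#dbg'):
                return stripped[:-len('#dbg')].rstrip()
            return line

    # Instantiating registers it (see PrefilterTransformer.__init__); attribute
    # names below are assumptions about the running shell:
    # ip = get_ipython()
    # DropDbgMarkerTransformer(shell=ip, prefilter_manager=ip.prefilter_manager,
    #                          config=ip.config)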
|
445 | 445 | #----------------------------------------------------------------------------- |
|
446 | 446 | # Prefilter transformers |
|
447 | 447 | #----------------------------------------------------------------------------- |
|
448 | 448 | |
|
449 | 449 | |
|
450 | 450 | class PrefilterTransformer(Configurable): |
|
451 | 451 | """Transform a line of user input.""" |
|
452 | 452 | |
|
453 | 453 | priority = Int(100, config=True) |
|
454 | 454 | # Transformers don't currently use shell or prefilter_manager, but as we |
|
455 | 455 | # move away from checkers and handlers, they will need them. |
|
456 | 456 | shell = Instance('IPython.core.interactiveshell.InteractiveShellABC') |
|
457 | 457 | prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager') |
|
458 | 458 | enabled = Bool(True, config=True) |
|
459 | 459 | |
|
460 | 460 | def __init__(self, shell=None, prefilter_manager=None, config=None): |
|
461 | 461 | super(PrefilterTransformer, self).__init__( |
|
462 | 462 | shell=shell, prefilter_manager=prefilter_manager, config=config |
|
463 | 463 | ) |
|
464 | 464 | self.prefilter_manager.register_transformer(self) |
|
465 | 465 | |
|
466 | 466 | def transform(self, line, continue_prompt): |
|
467 | 467 | """Transform a line, returning the new one.""" |
|
468 | 468 | return None |
|
469 | 469 | |
|
470 | 470 | def __repr__(self): |
|
471 | 471 | return "<%s(priority=%r, enabled=%r)>" % ( |
|
472 | 472 | self.__class__.__name__, self.priority, self.enabled) |
|
473 | 473 | |
|
474 | 474 | |
|
475 | 475 | _assign_system_re = re.compile(r'(?P<lhs>(\s*)([\w\.]+)((\s*,\s*[\w\.]+)*))' |
|
476 | 476 | r'\s*=\s*!(?P<cmd>.*)') |
|
477 | 477 | |
|
478 | 478 | |
|
479 | 479 | class AssignSystemTransformer(PrefilterTransformer): |
|
480 | 480 | """Handle the `files = !ls` syntax.""" |
|
481 | 481 | |
|
482 | 482 | priority = Int(100, config=True) |
|
483 | 483 | |
|
484 | 484 | def transform(self, line, continue_prompt): |
|
485 | 485 | m = _assign_system_re.match(line) |
|
486 | 486 | if m is not None: |
|
487 | 487 | cmd = m.group('cmd') |
|
488 | 488 | lhs = m.group('lhs') |
|
489 | 489 | expr = make_quoted_expr("sc =%s" % cmd) |
|
490 | 490 | new_line = '%s = get_ipython().magic(%s)' % (lhs, expr) |
|
491 | 491 | return new_line |
|
492 | 492 | return line |
|
493 | 493 | |
|
494 | 494 | |
|
495 | 495 | _assign_magic_re = re.compile(r'(?P<lhs>(\s*)([\w\.]+)((\s*,\s*[\w\.]+)*))' |
|
496 | 496 | r'\s*=\s*%(?P<cmd>.*)') |
|
497 | 497 | |
|
498 | 498 | class AssignMagicTransformer(PrefilterTransformer): |
|
499 | 499 | """Handle the `a = %who` syntax.""" |
|
500 | 500 | |
|
501 | 501 | priority = Int(200, config=True) |
|
502 | 502 | |
|
503 | 503 | def transform(self, line, continue_prompt): |
|
504 | 504 | m = _assign_magic_re.match(line) |
|
505 | 505 | if m is not None: |
|
506 | 506 | cmd = m.group('cmd') |
|
507 | 507 | lhs = m.group('lhs') |
|
508 | 508 | expr = make_quoted_expr(cmd) |
|
509 | 509 | new_line = '%s = get_ipython().magic(%s)' % (lhs, expr) |
|
510 | 510 | return new_line |
|
511 | 511 | return line |
|
512 | 512 | |
|
513 | 513 | |
|
514 | 514 | _classic_prompt_re = re.compile(r'(^[ \t]*>>> |^[ \t]*\.\.\. )') |
|
515 | 515 | |
|
516 | 516 | class PyPromptTransformer(PrefilterTransformer): |
|
517 | 517 | """Handle inputs that start with '>>> ' syntax.""" |
|
518 | 518 | |
|
519 | 519 | priority = Int(50, config=True) |
|
520 | 520 | |
|
521 | 521 | def transform(self, line, continue_prompt): |
|
522 | 522 | |
|
523 | 523 | if not line or line.isspace() or line.strip() == '...': |
|
524 | 524 | # This allows us to recognize multiple input prompts separated by |
|
525 | 525 | # blank lines and pasted in a single chunk, very common when |
|
526 | 526 | # pasting doctests or long tutorial passages. |
|
527 | 527 | return '' |
|
528 | 528 | m = _classic_prompt_re.match(line) |
|
529 | 529 | if m: |
|
530 | 530 | return line[len(m.group(0)):] |
|
531 | 531 | else: |
|
532 | 532 | return line |
|
533 | 533 | |
|
534 | 534 | |
|
535 | 535 | _ipy_prompt_re = re.compile(r'(^[ \t]*In \[\d+\]: |^[ \t]*\ \ \ \.\.\.+: )') |
|
536 | 536 | |
|
537 | 537 | class IPyPromptTransformer(PrefilterTransformer): |
|
538 | 538 | """Handle inputs that start classic IPython prompt syntax.""" |
|
539 | 539 | |
|
540 | 540 | priority = Int(50, config=True) |
|
541 | 541 | |
|
542 | 542 | def transform(self, line, continue_prompt): |
|
543 | 543 | |
|
544 | 544 | if not line or line.isspace() or line.strip() == '...': |
|
545 | 545 | # This allows us to recognize multiple input prompts separated by |
|
546 | 546 | # blank lines and pasted in a single chunk, very common when |
|
547 | 547 | # pasting doctests or long tutorial passages. |
|
548 | 548 | return '' |
|
549 | 549 | m = _ipy_prompt_re.match(line) |
|
550 | 550 | if m: |
|
551 | 551 | return line[len(m.group(0)):] |
|
552 | 552 | else: |
|
553 | 553 | return line |
|
554 | 554 | |
|
555 | 555 | #----------------------------------------------------------------------------- |
|
556 | 556 | # Prefilter checkers |
|
557 | 557 | #----------------------------------------------------------------------------- |
|
558 | 558 | |
|
559 | 559 | |
|
560 | 560 | class PrefilterChecker(Configurable): |
|
561 | 561 | """Inspect an input line and return a handler for that line.""" |
|
562 | 562 | |
|
563 | 563 | priority = Int(100, config=True) |
|
564 | 564 | shell = Instance('IPython.core.interactiveshell.InteractiveShellABC') |
|
565 | 565 | prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager') |
|
566 | 566 | enabled = Bool(True, config=True) |
|
567 | 567 | |
|
568 | 568 | def __init__(self, shell=None, prefilter_manager=None, config=None): |
|
569 | 569 | super(PrefilterChecker, self).__init__( |
|
570 | 570 | shell=shell, prefilter_manager=prefilter_manager, config=config |
|
571 | 571 | ) |
|
572 | 572 | self.prefilter_manager.register_checker(self) |
|
573 | 573 | |
|
574 | 574 | def check(self, line_info): |
|
575 | 575 | """Inspect line_info and return a handler instance or None.""" |
|
576 | 576 | return None |
|
577 | 577 | |
|
578 | 578 | def __repr__(self): |
|
579 | 579 | return "<%s(priority=%r, enabled=%r)>" % ( |
|
580 | 580 | self.__class__.__name__, self.priority, self.enabled) |
|
581 | 581 | |
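
Checkers follow the same registration pattern, but instead of rewriting the line they route it: check() returns one of the manager's handlers, or None to let the next checker try. A sketch with an invented trigger that simply reuses the existing 'normal' handler:

    from IPython.utils.traitlets import Int
    from IPython.core.prefilter import PrefilterChecker

    class GreetingChecker(PrefilterChecker):
        """Toy checker: lines starting with 'hello' go straight to 'normal'."""
        priority = Int(250, config=True)   # between the shell and autocall checks

        def check(self, line_info):
            if line_info.line.lstrip().startswith('hello'):
                return self.prefilter_manager.get_handler_by_name('normal')
            return None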
|
582 | 582 | |
|
583 | 583 | class EmacsChecker(PrefilterChecker): |
|
584 | 584 | |
|
585 | 585 | priority = Int(100, config=True) |
|
586 | 586 | enabled = Bool(False, config=True) |
|
587 | 587 | |
|
588 | 588 | def check(self, line_info): |
|
589 | 589 | "Emacs ipython-mode tags certain input lines." |
|
590 | 590 | if line_info.line.endswith('# PYTHON-MODE'): |
|
591 | 591 | return self.prefilter_manager.get_handler_by_name('emacs') |
|
592 | 592 | else: |
|
593 | 593 | return None |
|
594 | 594 | |
|
595 | 595 | |
|
596 | 596 | class ShellEscapeChecker(PrefilterChecker): |
|
597 | 597 | |
|
598 | 598 | priority = Int(200, config=True) |
|
599 | 599 | |
|
600 | 600 | def check(self, line_info): |
|
601 | 601 | if line_info.line.lstrip().startswith(ESC_SHELL): |
|
602 | 602 | return self.prefilter_manager.get_handler_by_name('shell') |
|
603 | 603 | |
|
604 | 604 | |
|
605 | 605 | class IPyAutocallChecker(PrefilterChecker): |
|
606 | 606 | |
|
607 | 607 | priority = Int(300, config=True) |
|
608 | 608 | |
|
609 | 609 | def check(self, line_info): |
|
610 | 610 | "Instances of IPyAutocall in user_ns get autocalled immediately" |
|
611 | 611 | obj = self.shell.user_ns.get(line_info.ifun, None) |
|
612 | 612 | if isinstance(obj, IPyAutocall): |
|
613 | 613 | obj.set_ip(self.shell) |
|
614 | 614 | return self.prefilter_manager.get_handler_by_name('auto') |
|
615 | 615 | else: |
|
616 | 616 | return None |
|
617 | 617 | |
|
618 | 618 | |
|
619 | 619 | class MultiLineMagicChecker(PrefilterChecker): |
|
620 | 620 | |
|
621 | 621 | priority = Int(400, config=True) |
|
622 | 622 | |
|
623 | 623 | def check(self, line_info): |
|
624 | 624 | "Allow ! and !! in multi-line statements if multi_line_specials is on" |
|
625 | 625 | # Note that this is one of the only places we check the first character of |
|
626 | 626 | # ifun and *not* the pre_char. Also note that the below test matches |
|
627 | 627 | # both ! and !!. |
|
628 | 628 | if line_info.continue_prompt \ |
|
629 | 629 | and self.prefilter_manager.multi_line_specials: |
|
630 | 630 | if line_info.ifun.startswith(ESC_MAGIC): |
|
631 | 631 | return self.prefilter_manager.get_handler_by_name('magic') |
|
632 | 632 | else: |
|
633 | 633 | return None |
|
634 | 634 | |
|
635 | 635 | |
|
636 | 636 | class EscCharsChecker(PrefilterChecker): |
|
637 | 637 | |
|
638 | 638 | priority = Int(500, config=True) |
|
639 | 639 | |
|
640 | 640 | def check(self, line_info): |
|
641 | 641 | """Check for escape character and return either a handler to handle it, |
|
642 | 642 | or None if there is no escape char.""" |
|
643 | 643 | if line_info.line[-1] == ESC_HELP \ |
|
644 | 644 | and line_info.pre_char != ESC_SHELL \ |
|
645 | 645 | and line_info.pre_char != ESC_SH_CAP: |
|
646 | 646 | # the ? can be at the end, but *not* for either kind of shell escape, |
|
647 | 647 | # because a ? can be a valid final char in a shell cmd |
|
648 | 648 | return self.prefilter_manager.get_handler_by_name('help') |
|
649 | 649 | else: |
|
650 | 650 | # This returns None like it should if no handler exists |
|
651 | 651 | return self.prefilter_manager.get_handler_by_esc(line_info.pre_char) |
|
652 | 652 | |
|
653 | 653 | |
|
654 | 654 | class AssignmentChecker(PrefilterChecker): |
|
655 | 655 | |
|
656 | 656 | priority = Int(600, config=True) |
|
657 | 657 | |
|
658 | 658 | def check(self, line_info): |
|
659 | 659 | """Check to see if user is assigning to a var for the first time, in |
|
660 | 660 | which case we want to avoid any sort of automagic / autocall games. |
|
661 | 661 | |
|
662 | 662 | This allows users to assign true python variables to names that are |

663 | 663 | also aliases or magics (the magic/alias systems always take second |

664 | 664 | seat to true python code). E.g. ls='hi', or ls,that=1,2""" |
|
665 | 665 | if line_info.the_rest: |
|
666 | 666 | if line_info.the_rest[0] in '=,': |
|
667 | 667 | return self.prefilter_manager.get_handler_by_name('normal') |
|
668 | 668 | else: |
|
669 | 669 | return None |
|
670 | 670 | |
|
671 | 671 | |
|
672 | 672 | class AutoMagicChecker(PrefilterChecker): |
|
673 | 673 | |
|
674 | 674 | priority = Int(700, config=True) |
|
675 | 675 | |
|
676 | 676 | def check(self, line_info): |
|
677 | 677 | """If the ifun is magic, and automagic is on, run it. Note: normal, |
|
678 | 678 | non-auto magic would already have been triggered via '%' in |
|
679 | 679 | check_esc_chars. This just checks for automagic. Also, before |
|
680 | 680 | triggering the magic handler, make sure that there is nothing in the |
|
681 | 681 | user namespace which could shadow it.""" |
|
682 | 682 | if not self.shell.automagic or not hasattr(self.shell,'magic_'+line_info.ifun): |
|
683 | 683 | return None |
|
684 | 684 | |
|
685 | 685 | # We have a likely magic method. Make sure we should actually call it. |
|
686 | 686 | if line_info.continue_prompt and not self.prefilter_manager.multi_line_specials: |
|
687 | 687 | return None |
|
688 | 688 | |
|
689 | 689 | head = line_info.ifun.split('.',1)[0] |
|
690 | 690 | if is_shadowed(head, self.shell): |
|
691 | 691 | return None |
|
692 | 692 | |
|
693 | 693 | return self.prefilter_manager.get_handler_by_name('magic') |
|
694 | 694 | |
|
695 | 695 | |
|
696 | 696 | class AliasChecker(PrefilterChecker): |
|
697 | 697 | |
|
698 | 698 | priority = Int(800, config=True) |
|
699 | 699 | |
|
700 | 700 | def check(self, line_info): |
|
701 | 701 | "Check if the initial identifier on the line is an alias." |
|
702 | 702 | # Note: aliases can not contain '.' |
|
703 | 703 | head = line_info.ifun.split('.',1)[0] |
|
704 | 704 | if line_info.ifun not in self.shell.alias_manager \ |
|
705 | 705 | or head not in self.shell.alias_manager \ |
|
706 | 706 | or is_shadowed(head, self.shell): |
|
707 | 707 | return None |
|
708 | 708 | |
|
709 | 709 | return self.prefilter_manager.get_handler_by_name('alias') |
|
710 | 710 | |
|
711 | 711 | |
|
712 | 712 | class PythonOpsChecker(PrefilterChecker): |
|
713 | 713 | |
|
714 | 714 | priority = Int(900, config=True) |
|
715 | 715 | |
|
716 | 716 | def check(self, line_info): |
|
717 | 717 | """If the 'rest' of the line begins with a function call or pretty much |
|
718 | 718 | any python operator, we should simply execute the line (regardless of |
|
719 | 719 | whether or not there's a possible autocall expansion). This avoids |
|
720 | 720 | spurious (and very confusing) getattr() accesses.""" |
|
721 | 721 | if line_info.the_rest and line_info.the_rest[0] in '!=()<>,+*/%^&|': |
|
722 | 722 | return self.prefilter_manager.get_handler_by_name('normal') |
|
723 | 723 | else: |
|
724 | 724 | return None |
|
725 | 725 | |
|
726 | 726 | |
|
727 | 727 | class AutocallChecker(PrefilterChecker): |
|
728 | 728 | |
|
729 | 729 | priority = Int(1000, config=True) |
|
730 | 730 | |
|
731 | 731 | def check(self, line_info): |
|
732 | 732 | "Check if the initial word/function is callable and autocall is on." |
|
733 | 733 | if not self.shell.autocall: |
|
734 | 734 | return None |
|
735 | 735 | |
|
736 | 736 | oinfo = line_info.ofind(self.shell) # This can mutate state via getattr |
|
737 | 737 | if not oinfo['found']: |
|
738 | 738 | return None |
|
739 | 739 | |
|
740 | 740 | if callable(oinfo['obj']) \ |
|
741 | 741 | and (not re_exclude_auto.match(line_info.the_rest)) \ |
|
742 | 742 | and re_fun_name.match(line_info.ifun): |
|
743 | 743 | return self.prefilter_manager.get_handler_by_name('auto') |
|
744 | 744 | else: |
|
745 | 745 | return None |
|
746 | 746 | |
|
747 | 747 | |
|
748 | 748 | #----------------------------------------------------------------------------- |
|
749 | 749 | # Prefilter handlers |
|
750 | 750 | #----------------------------------------------------------------------------- |
|
751 | 751 | |
|
752 | 752 | |
|
753 | 753 | class PrefilterHandler(Configurable): |
|
754 | 754 | |
|
755 | 755 | handler_name = Str('normal') |
|
756 | 756 | esc_strings = List([]) |
|
757 | 757 | shell = Instance('IPython.core.interactiveshell.InteractiveShellABC') |
|
758 | 758 | prefilter_manager = Instance('IPython.core.prefilter.PrefilterManager') |
|
759 | 759 | |
|
760 | 760 | def __init__(self, shell=None, prefilter_manager=None, config=None): |
|
761 | 761 | super(PrefilterHandler, self).__init__( |
|
762 | 762 | shell=shell, prefilter_manager=prefilter_manager, config=config |
|
763 | 763 | ) |
|
764 | 764 | self.prefilter_manager.register_handler( |
|
765 | 765 | self.handler_name, |
|
766 | 766 | self, |
|
767 | 767 | self.esc_strings |
|
768 | 768 | ) |
|
769 | 769 | |
|
770 | 770 | def handle(self, line_info): |
|
771 | 771 | # print "normal: ", line_info |
|
772 | 772 | """Handle normal input lines. Use as a template for handlers.""" |
|
773 | 773 | |
|
774 | 774 | # With autoindent on, we need some way to exit the input loop, and I |
|
775 | 775 | # don't want to force the user to have to backspace all the way to |
|
776 | 776 | # clear the line. The rule will be in this case, that either two |
|
777 | 777 | # lines of pure whitespace in a row, or a line of pure whitespace but |
|
778 | 778 | # of a size different to the indent level, will exit the input loop. |
|
779 | 779 | line = line_info.line |
|
780 | 780 | continue_prompt = line_info.continue_prompt |
|
781 | 781 | |
|
782 | 782 | if (continue_prompt and |
|
783 | 783 | self.shell.autoindent and |
|
784 | 784 | line.isspace() and |
|
785 | 785 | |
|
786 | 786 | (0 < abs(len(line) - self.shell.indent_current_nsp) <= 2 |
|
787 | 787 | or |
|
788 | 788 | not self.shell.buffer |
|
789 | 789 | or |
|
790 | 790 | (self.shell.buffer[-1]).isspace() |
|
791 | 791 | ) |
|
792 | 792 | ): |
|
793 | 793 | line = '' |
|
794 | 794 | |
|
795 | 795 | self.shell.log(line, line, continue_prompt) |
|
796 | 796 | return line |
|
797 | 797 | |
|
798 | 798 | def __str__(self): |
|
799 | 799 | return "<%s(name=%s)>" % (self.__class__.__name__, self.handler_name) |
|
800 | 800 | |
|
801 | 801 | |
|
802 | 802 | class AliasHandler(PrefilterHandler): |
|
803 | 803 | |
|
804 | 804 | handler_name = Str('alias') |
|
805 | 805 | |
|
806 | 806 | def handle(self, line_info): |
|
807 | 807 | """Handle alias input lines. """ |
|
808 | 808 | transformed = self.shell.alias_manager.expand_aliases(line_info.ifun,line_info.the_rest) |
|
809 | 809 | # pre is needed, because it carries the leading whitespace. Otherwise |
|
810 | 810 | # aliases won't work in indented sections. |
|
811 | 811 | line_out = '%sget_ipython().system(%s)' % (line_info.pre_whitespace, |
|
812 | 812 | make_quoted_expr(transformed)) |
|
813 | 813 | |
|
814 | 814 | self.shell.log(line_info.line, line_out, line_info.continue_prompt) |
|
815 | 815 | return line_out |
|
816 | 816 | |
|
817 | 817 | |
|
818 | 818 | class ShellEscapeHandler(PrefilterHandler): |
|
819 | 819 | |
|
820 | 820 | handler_name = Str('shell') |
|
821 | 821 | esc_strings = List([ESC_SHELL, ESC_SH_CAP]) |
|
822 | 822 | |
|
823 | 823 | def handle(self, line_info): |
|
824 | 824 | """Execute the line in a shell, empty return value""" |
|
825 | 825 | magic_handler = self.prefilter_manager.get_handler_by_name('magic') |
|
826 | 826 | |
|
827 | 827 | line = line_info.line |
|
828 | 828 | if line.lstrip().startswith(ESC_SH_CAP): |
|
829 | 829 | # rewrite LineInfo's line, ifun and the_rest to properly hold the |
|
830 | 830 | # call to %sx and the actual command to be executed, so |
|
831 | 831 | # handle_magic can work correctly. Note that this works even if |
|
832 | 832 | # the line is indented, so it handles multi_line_specials |
|
833 | 833 | # properly. |
|
834 | 834 | new_rest = line.lstrip()[2:] |
|
835 | 835 | line_info.line = '%ssx %s' % (ESC_MAGIC, new_rest) |
|
836 | 836 | line_info.ifun = 'sx' |
|
837 | 837 | line_info.the_rest = new_rest |
|
838 | 838 | return magic_handler.handle(line_info) |
|
839 | 839 | else: |
|
840 | 840 | cmd = line.lstrip().lstrip(ESC_SHELL) |
|
841 | 841 | line_out = '%sget_ipython().system(%s)' % (line_info.pre_whitespace, |
|
842 | 842 | make_quoted_expr(cmd)) |
|
843 | 843 | # update cache/log and return |
|
844 | 844 | self.shell.log(line, line_out, line_info.continue_prompt) |
|
845 | 845 | return line_out |
|
846 | 846 | |
|
847 | 847 | |
|
848 | 848 | class MagicHandler(PrefilterHandler): |
|
849 | 849 | |
|
850 | 850 | handler_name = Str('magic') |
|
851 | 851 | esc_strings = List([ESC_MAGIC]) |
|
852 | 852 | |
|
853 | 853 | def handle(self, line_info): |
|
854 | 854 | """Execute magic functions.""" |
|
855 | 855 | ifun = line_info.ifun |
|
856 | 856 | the_rest = line_info.the_rest |
|
857 | 857 | cmd = '%sget_ipython().magic(%s)' % (line_info.pre_whitespace, |
|
858 | 858 | make_quoted_expr(ifun + " " + the_rest)) |
|
859 | 859 | self.shell.log(line_info.line, cmd, line_info.continue_prompt) |
|
860 | 860 | return cmd |
|
861 | 861 | |
|
862 | 862 | |
|
863 | 863 | class AutoHandler(PrefilterHandler): |
|
864 | 864 | |
|
865 | 865 | handler_name = Str('auto') |
|
866 | 866 | esc_strings = List([ESC_PAREN, ESC_QUOTE, ESC_QUOTE2]) |
|
867 | 867 | |
|
868 | 868 | def handle(self, line_info): |
|
869 | 869 | """Handle lines which can be auto-executed, quoting if requested.""" |
|
870 | 870 | line = line_info.line |
|
871 | 871 | ifun = line_info.ifun |
|
872 | 872 | the_rest = line_info.the_rest |
|
873 | 873 | pre = line_info.pre |
|
874 | 874 | continue_prompt = line_info.continue_prompt |
|
875 | 875 | obj = line_info.ofind(self)['obj'] |
|
876 | 876 | #print 'pre <%s> ifun <%s> rest <%s>' % (pre,ifun,the_rest) # dbg |
|
877 | 877 | |
|
878 | 878 | # This should only be active for single-line input! |
|
879 | 879 | if continue_prompt: |
|
880 | 880 | self.shell.log(line,line,continue_prompt) |
|
881 | 881 | return line |
|
882 | 882 | |
|
883 | 883 | force_auto = isinstance(obj, IPyAutocall) |
|
884 | 884 | auto_rewrite = True |
|
885 | 885 | |
|
886 | 886 | if pre == ESC_QUOTE: |
|
887 | 887 | # Auto-quote splitting on whitespace |
|
888 | 888 | newcmd = '%s("%s")' % (ifun,'", "'.join(the_rest.split()) ) |
|
889 | 889 | elif pre == ESC_QUOTE2: |
|
890 | 890 | # Auto-quote whole string |
|
891 | 891 | newcmd = '%s("%s")' % (ifun,the_rest) |
|
892 | 892 | elif pre == ESC_PAREN: |
|
893 | 893 | newcmd = '%s(%s)' % (ifun,",".join(the_rest.split())) |
|
894 | 894 | else: |
|
895 | 895 | # Auto-paren. |
|
896 | 896 | # We only apply it to argument-less calls if the autocall |
|
897 | 897 | # parameter is set to 2. We only need to check that autocall is < |
|
898 | 898 | # 2, since this function isn't called unless it's at least 1. |
|
899 | 899 | if not the_rest and (self.shell.autocall < 2) and not force_auto: |
|
900 | 900 | newcmd = '%s %s' % (ifun,the_rest) |
|
901 | 901 | auto_rewrite = False |
|
902 | 902 | else: |
|
903 | 903 | if not force_auto and the_rest.startswith('['): |
|
904 | 904 | if hasattr(obj,'__getitem__'): |
|
905 | 905 | # Don't autocall in this case: item access for an object |
|
906 | 906 | # which is BOTH callable and implements __getitem__. |
|
907 | 907 | newcmd = '%s %s' % (ifun,the_rest) |
|
908 | 908 | auto_rewrite = False |
|
909 | 909 | else: |
|
910 | 910 | # if the object doesn't support [] access, go ahead and |
|
911 | 911 | # autocall |
|
912 | 912 | newcmd = '%s(%s)' % (ifun.rstrip(),the_rest) |
|
913 | 913 | elif the_rest.endswith(';'): |
|
914 | 914 | newcmd = '%s(%s);' % (ifun.rstrip(),the_rest[:-1]) |
|
915 | 915 | else: |
|
916 | 916 | newcmd = '%s(%s)' % (ifun.rstrip(), the_rest) |
|
917 | 917 | |
|
918 | 918 | if auto_rewrite: |
|
919 | 919 | self.shell.auto_rewrite_input(newcmd) |
|
920 | 920 | |
|
921 | 921 | # log what is now valid Python, not the actual user input (without the |
|
922 | 922 | # final newline) |
|
923 | 923 | self.shell.log(line,newcmd,continue_prompt) |
|
924 | 924 | return newcmd |
|
925 | 925 | |
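
Summarizing the branches above, the escape prefixes produce rewrites along the following lines for a callable f with autocall active (outputs are indicative of the string handed back to the interpreter):

    # ,f a b   ->  f("a", "b")    ESC_QUOTE : auto-quote, splitting on whitespace
    # ;f a b   ->  f("a b")       ESC_QUOTE2: auto-quote the whole remainder
    # /f a b   ->  f(a,b)         ESC_PAREN : auto-parenthesize the arguments
    # f x      ->  f(x)           plain autocall of a callable name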
|
926 | 926 | |
|
927 | 927 | class HelpHandler(PrefilterHandler): |
|
928 | 928 | |
|
929 | 929 | handler_name = Str('help') |
|
930 | 930 | esc_strings = List([ESC_HELP]) |
|
931 | 931 | |
|
932 | 932 | def handle(self, line_info): |
|
933 | 933 | """Try to get some help for the object. |
|
934 | 934 | |
|
935 | 935 | obj? or ?obj -> basic information. |
|
936 | 936 | obj?? or ??obj -> more details. |
|
937 | 937 | """ |
|
938 | 938 | normal_handler = self.prefilter_manager.get_handler_by_name('normal') |
|
939 | 939 | line = line_info.line |
|
940 | 940 | # We need to make sure that we don't process lines which would be |
|
941 | 941 | # otherwise valid python, such as "x=1 # what?" |
|
942 | 942 | try: |
|
943 | 943 | codeop.compile_command(line) |
|
944 | 944 | except SyntaxError: |
|
945 | 945 | # We should only handle as help stuff which is NOT valid syntax |
|
946 | 946 | if line[0]==ESC_HELP: |
|
947 | 947 | line = line[1:] |
|
948 | 948 | elif line[-1]==ESC_HELP: |
|
949 | 949 | line = line[:-1] |
|
950 | 950 | self.shell.log(line, '#?'+line, line_info.continue_prompt) |
|
951 | 951 | if line: |
|
952 | 952 | #print 'line:<%r>' % line # dbg |
|
953 | 953 | self.shell.magic_pinfo(line) |
|
954 | 954 | else: |
|
955 | 955 | self.shell.show_usage() |
|
956 | 956 | return '' # Empty string is needed here! |
|
957 | 957 | except: |
|
958 | 958 | raise |
|
959 | 959 | # Pass any other exceptions through to the normal handler |
|
960 | 960 | return normal_handler.handle(line_info) |
|
961 | 961 | else: |
|
962 | 962 | # If the code compiles ok, we should handle it normally |
|
963 | 963 | return normal_handler.handle(line_info) |
|
964 | 964 | |
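
The codeop.compile_command() gate above is what keeps ordinary code with a question mark in a comment out of the help machinery: only lines that fail to compile as plain Python become help requests. A standalone illustration using just the standard library:

    import codeop

    for candidate in ('x?', 'x = 1  # what?'):
        try:
            codeop.compile_command(candidate)
        except SyntaxError:
            print '%r -> treated as a help request' % candidate
        else:
            print '%r -> passed to the normal handler' % candidate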
|
965 | 965 | |
|
966 | 966 | class EmacsHandler(PrefilterHandler): |
|
967 | 967 | |
|
968 | 968 | handler_name = Str('emacs') |
|
969 | 969 | esc_strings = List([]) |
|
970 | 970 | |
|
971 | 971 | def handle(self, line_info): |
|
972 | 972 | """Handle input lines marked by python-mode.""" |
|
973 | 973 | |
|
974 | 974 | # Currently, nothing is done. Later more functionality can be added |
|
975 | 975 | # here if needed. |
|
976 | 976 | |
|
977 | 977 | # The input cache shouldn't be updated |
|
978 | 978 | return line_info.line |
|
979 | 979 | |
|
980 | 980 | |
|
981 | 981 | #----------------------------------------------------------------------------- |
|
982 | 982 | # Defaults |
|
983 | 983 | #----------------------------------------------------------------------------- |
|
984 | 984 | |
|
985 | 985 | |
|
986 | 986 | _default_transformers = [ |
|
987 | 987 | AssignSystemTransformer, |
|
988 | 988 | AssignMagicTransformer, |
|
989 | 989 | PyPromptTransformer, |
|
990 | 990 | IPyPromptTransformer, |
|
991 | 991 | ] |
|
992 | 992 | |
|
993 | 993 | _default_checkers = [ |
|
994 | 994 | EmacsChecker, |
|
995 | 995 | ShellEscapeChecker, |
|
996 | 996 | IPyAutocallChecker, |
|
997 | 997 | MultiLineMagicChecker, |
|
998 | 998 | EscCharsChecker, |
|
999 | 999 | AssignmentChecker, |
|
1000 | 1000 | AutoMagicChecker, |
|
1001 | 1001 | AliasChecker, |
|
1002 | 1002 | PythonOpsChecker, |
|
1003 | 1003 | AutocallChecker |
|
1004 | 1004 | ] |
|
1005 | 1005 | |
|
1006 | 1006 | _default_handlers = [ |
|
1007 | 1007 | PrefilterHandler, |
|
1008 | 1008 | AliasHandler, |
|
1009 | 1009 | ShellEscapeHandler, |
|
1010 | 1010 | MagicHandler, |
|
1011 | 1011 | AutoHandler, |
|
1012 | 1012 | HelpHandler, |
|
1013 | 1013 | EmacsHandler |
|
1014 | 1014 | ] |
@@ -1,658 +1,658 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """Tests for the inputsplitter module. |
|
3 | 3 | """ |
|
4 | 4 | #----------------------------------------------------------------------------- |
|
5 | 5 | # Copyright (C) 2010 The IPython Development Team |
|
6 | 6 | # |
|
7 | 7 | # Distributed under the terms of the BSD License. The full license is in |
|
8 | 8 | # the file COPYING, distributed as part of this software. |
|
9 | 9 | #----------------------------------------------------------------------------- |
|
10 | 10 | |
|
11 | 11 | #----------------------------------------------------------------------------- |
|
12 | 12 | # Imports |
|
13 | 13 | #----------------------------------------------------------------------------- |
|
14 | 14 | # stdlib |
|
15 | 15 | import unittest |
|
16 | 16 | import sys |
|
17 | 17 | |
|
18 | 18 | # Third party |
|
19 | 19 | import nose.tools as nt |
|
20 | 20 | |
|
21 | 21 | # Our own |
|
22 | 22 | from IPython.core import inputsplitter as isp |
|
23 | 23 | |
|
24 | 24 | #----------------------------------------------------------------------------- |
|
25 | 25 | # Semi-complete examples (also used as tests) |
|
26 | 26 | #----------------------------------------------------------------------------- |
|
27 | 27 | |
|
28 | 28 | # Note: at the bottom, there's a slightly more complete version of this that |
|
29 | 29 | # can be useful during development of code here. |
|
30 | 30 | |
|
31 | def mini_interactive_loop( |

31 | def mini_interactive_loop(input_func): |
|
32 | 32 | """Minimal example of the logic of an interactive interpreter loop. |
|
33 | 33 | |
|
34 | 34 | This serves as an example, and it is used by the test system with a fake |
|
35 | 35 | raw_input that simulates interactive input.""" |
|
36 | 36 | |
|
37 | 37 | from IPython.core.inputsplitter import InputSplitter |
|
38 | 38 | |
|
39 | 39 | isp = InputSplitter() |
|
40 | 40 | # In practice, this input loop would be wrapped in an outside loop to read |
|
41 | 41 | # input indefinitely, until some exit/quit command was issued. Here we |
|
42 | 42 | # only illustrate the basic inner loop. |
|
43 | 43 | while isp.push_accepts_more(): |
|
44 | 44 | indent = ' '*isp.indent_spaces |
|
45 | 45 | prompt = '>>> ' + indent |
|
46 | line = indent + raw_input(prompt) | 
|
46 | line = indent + input_func(prompt) | |
|
47 | 47 | isp.push(line) |
|
48 | 48 | |
|
49 | 49 | # Here we just return input so we can use it in a test suite, but a real |
|
50 | 50 | # interpreter would instead send it for execution somewhere. |
|
51 | 51 | src = isp.source_reset() |
|
52 | 52 | #print 'Input source was:\n', src # dbg |
|
53 | 53 | return src |
|
54 | 54 | |
|
55 | 55 | #----------------------------------------------------------------------------- |
|
56 | 56 | # Test utilities, just for local use |
|
57 | 57 | #----------------------------------------------------------------------------- |
|
58 | 58 | |
|
59 | 59 | def assemble(block): |
|
60 | 60 | """Assemble a block into multi-line sub-blocks.""" |
|
61 | 61 | return ['\n'.join(sub_block)+'\n' for sub_block in block] |
|
62 | 62 | |
|
63 | 63 | |
|
64 | 64 | def pseudo_input(lines): |
|
65 | 65 | """Return a function that acts like raw_input but feeds the input list.""" |
|
66 | 66 | ilines = iter(lines) |
|
67 | 67 | def raw_in(prompt): |
|
68 | 68 | try: |
|
69 | 69 | return next(ilines) |
|
70 | 70 | except StopIteration: |
|
71 | 71 | return '' |
|
72 | 72 | return raw_in |
|
73 | 73 | |
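pseudo_input stands in for the interactive input function: it hands out the canned lines one at a time and then empty strings forever, which is what eventually lets push_accepts_more() report a finished block. A small hand-driven sketch, mirroring the check_ns helper further down this file (Python 2 exec-statement syntax):

    # Drive the loop above with canned input; cf. check_ns in the test class below.
    src = mini_interactive_loop(pseudo_input(['if 1:', 'x = 2']))
    ns = {}
    exec src in ns
    assert ns['x'] == 2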
|
74 | 74 | #----------------------------------------------------------------------------- |
|
75 | 75 | # Tests |
|
76 | 76 | #----------------------------------------------------------------------------- |
|
77 | 77 | def test_spaces(): |
|
78 | 78 | tests = [('', 0), |
|
79 | 79 | (' ', 1), |
|
80 | 80 | ('\n', 0), |
|
81 | 81 | (' \n', 1), |
|
82 | 82 | ('x', 0), |
|
83 | 83 | (' x', 1), |
|
84 | 84 | (' x',2), |
|
85 | 85 | (' x',4), |
|
86 | 86 | # Note: tabs are counted as a single whitespace! |
|
87 | 87 | ('\tx', 1), |
|
88 | 88 | ('\t x', 2), |
|
89 | 89 | ] |
|
90 | 90 | |
|
91 | 91 | for s, nsp in tests: |
|
92 | 92 | nt.assert_equal(isp.num_ini_spaces(s), nsp) |
|
93 | 93 | |
|
94 | 94 | |
|
95 | 95 | def test_remove_comments(): |
|
96 | 96 | tests = [('text', 'text'), |
|
97 | 97 | ('text # comment', 'text '), |
|
98 | 98 | ('text # comment\n', 'text \n'), |
|
99 | 99 | ('text # comment \n', 'text \n'), |
|
100 | 100 | ('line # c \nline\n','line \nline\n'), |
|
101 | 101 | ('line # c \nline#c2 \nline\nline #c\n\n', |
|
102 | 102 | 'line \nline\nline\nline \n\n'), |
|
103 | 103 | ] |
|
104 | 104 | |
|
105 | 105 | for inp, out in tests: |
|
106 | 106 | nt.assert_equal(isp.remove_comments(inp), out) |
|
107 | 107 | |
|
108 | 108 | |
|
109 | 109 | def test_get_input_encoding(): |
|
110 | 110 | encoding = isp.get_input_encoding() |
|
111 | 111 | nt.assert_true(isinstance(encoding, basestring)) |
|
112 | 112 | # simple-minded check that at least encoding a simple string works with the |
|
113 | 113 | # encoding we got. |
|
114 | 114 | nt.assert_equal('test'.encode(encoding), 'test') |
|
115 | 115 | |
|
116 | 116 | |
|
117 | 117 | class NoInputEncodingTestCase(unittest.TestCase): |
|
118 | 118 | def setUp(self): |
|
119 | 119 | self.old_stdin = sys.stdin |
|
120 | 120 | class X: pass |
|
121 | 121 | fake_stdin = X() |
|
122 | 122 | sys.stdin = fake_stdin |
|
123 | 123 | |
|
124 | 124 | def test(self): |
|
125 | 125 | # Verify that if sys.stdin has no 'encoding' attribute we do the right |
|
126 | 126 | # thing |
|
127 | 127 | enc = isp.get_input_encoding() |
|
128 | 128 | self.assertEqual(enc, 'ascii') |
|
129 | 129 | |
|
130 | 130 | def tearDown(self): |
|
131 | 131 | sys.stdin = self.old_stdin |
|
132 | 132 | |
|
133 | 133 | |
|
134 | 134 | class InputSplitterTestCase(unittest.TestCase): |
|
135 | 135 | def setUp(self): |
|
136 | 136 | self.isp = isp.InputSplitter() |
|
137 | 137 | |
|
138 | 138 | def test_reset(self): |
|
139 | 139 | isp = self.isp |
|
140 | 140 | isp.push('x=1') |
|
141 | 141 | isp.reset() |
|
142 | 142 | self.assertEqual(isp._buffer, []) |
|
143 | 143 | self.assertEqual(isp.indent_spaces, 0) |
|
144 | 144 | self.assertEqual(isp.source, '') |
|
145 | 145 | self.assertEqual(isp.code, None) |
|
146 | 146 | self.assertEqual(isp._is_complete, False) |
|
147 | 147 | |
|
148 | 148 | def test_source(self): |
|
149 | 149 | self.isp._store('1') |
|
150 | 150 | self.isp._store('2') |
|
151 | 151 | self.assertEqual(self.isp.source, '1\n2\n') |
|
152 | 152 | self.assertTrue(len(self.isp._buffer)>0) |
|
153 | 153 | self.assertEqual(self.isp.source_reset(), '1\n2\n') |
|
154 | 154 | self.assertEqual(self.isp._buffer, []) |
|
155 | 155 | self.assertEqual(self.isp.source, '') |
|
156 | 156 | |
|
157 | 157 | def test_indent(self): |
|
158 | 158 | isp = self.isp # shorthand |
|
159 | 159 | isp.push('x=1') |
|
160 | 160 | self.assertEqual(isp.indent_spaces, 0) |
|
161 | 161 | isp.push('if 1:\n x=1') |
|
162 | 162 | self.assertEqual(isp.indent_spaces, 4) |
|
163 | 163 | isp.push('y=2\n') |
|
164 | 164 | self.assertEqual(isp.indent_spaces, 0) |
|
165 | 165 | isp.push('if 1:') |
|
166 | 166 | self.assertEqual(isp.indent_spaces, 4) |
|
167 | 167 | isp.push(' x=1') |
|
168 | 168 | self.assertEqual(isp.indent_spaces, 4) |
|
169 | 169 | # Blank lines shouldn't change the indent level |
|
170 | 170 | isp.push(' '*2) |
|
171 | 171 | self.assertEqual(isp.indent_spaces, 4) |
|
172 | 172 | |
|
173 | 173 | def test_indent2(self): |
|
174 | 174 | isp = self.isp |
|
175 | 175 | # When a multiline statement contains parens or multiline strings, we |
|
176 | 176 | # shouldn't get confused. |
|
177 | 177 | isp.push("if 1:") |
|
178 | 178 | isp.push(" x = (1+\n 2)") |
|
179 | 179 | self.assertEqual(isp.indent_spaces, 4) |
|
180 | 180 | |
|
181 | 181 | def test_dedent(self): |
|
182 | 182 | isp = self.isp # shorthand |
|
183 | 183 | isp.push('if 1:') |
|
184 | 184 | self.assertEqual(isp.indent_spaces, 4) |
|
185 | 185 | isp.push(' pass') |
|
186 | 186 | self.assertEqual(isp.indent_spaces, 0) |
|
187 | 187 | |
|
188 | 188 | def test_push(self): |
|
189 | 189 | isp = self.isp |
|
190 | 190 | self.assertTrue(isp.push('x=1')) |
|
191 | 191 | |
|
192 | 192 | def test_push2(self): |
|
193 | 193 | isp = self.isp |
|
194 | 194 | self.assertFalse(isp.push('if 1:')) |
|
195 | 195 | for line in [' x=1', '# a comment', ' y=2']: |
|
196 | 196 | self.assertTrue(isp.push(line)) |
|
197 | 197 | |
|
198 | 198 | def test_push3(self): |
|
199 | 199 | """Test input with leading whitespace""" |
|
200 | 200 | isp = self.isp |
|
201 | 201 | isp.push(' x=1') |
|
202 | 202 | isp.push(' y=2') |
|
203 | 203 | self.assertEqual(isp.source, 'if 1:\n x=1\n y=2\n') |
|
204 | 204 | |
|
205 | 205 | def test_replace_mode(self): |
|
206 | 206 | isp = self.isp |
|
207 | 207 | isp.input_mode = 'cell' |
|
208 | 208 | isp.push('x=1') |
|
209 | 209 | self.assertEqual(isp.source, 'x=1\n') |
|
210 | 210 | isp.push('x=2') |
|
211 | 211 | self.assertEqual(isp.source, 'x=2\n') |
|
212 | 212 | |
|
213 | 213 | def test_push_accepts_more(self): |
|
214 | 214 | isp = self.isp |
|
215 | 215 | isp.push('x=1') |
|
216 | 216 | self.assertFalse(isp.push_accepts_more()) |
|
217 | 217 | |
|
218 | 218 | def test_push_accepts_more2(self): |
|
219 | 219 | isp = self.isp |
|
220 | 220 | isp.push('if 1:') |
|
221 | 221 | self.assertTrue(isp.push_accepts_more()) |
|
222 | 222 | isp.push(' x=1') |
|
223 | 223 | self.assertTrue(isp.push_accepts_more()) |
|
224 | 224 | isp.push('') |
|
225 | 225 | self.assertFalse(isp.push_accepts_more()) |
|
226 | 226 | |
|
227 | 227 | def test_push_accepts_more3(self): |
|
228 | 228 | isp = self.isp |
|
229 | 229 | isp.push("x = (2+\n3)") |
|
230 | 230 | self.assertFalse(isp.push_accepts_more()) |
|
231 | 231 | |
|
232 | 232 | def test_push_accepts_more4(self): |
|
233 | 233 | isp = self.isp |
|
234 | 234 | # When a multiline statement contains parens or multiline strings, we |
|
235 | 235 | # shouldn't get confused. |
|
236 | 236 | # FIXME: we should be able to better handle de-dents in statements like |
|
237 | 237 | # multiline strings and multiline expressions (continued with \ or |
|
238 | 238 | # parens). Right now we aren't handling the indentation tracking quite |
|
239 | 239 | # correctly with this, though in practice it may not be too much of a |
|
240 | 240 | # problem. We'll need to see. |
|
241 | 241 | isp.push("if 1:") |
|
242 | 242 | isp.push(" x = (2+") |
|
243 | 243 | isp.push(" 3)") |
|
244 | 244 | self.assertTrue(isp.push_accepts_more()) |
|
245 | 245 | isp.push(" y = 3") |
|
246 | 246 | self.assertTrue(isp.push_accepts_more()) |
|
247 | 247 | isp.push('') |
|
248 | 248 | self.assertFalse(isp.push_accepts_more()) |
|
249 | 249 | |
|
250 | 250 | def test_continuation(self): |
|
251 | 251 | isp = self.isp |
|
252 | 252 | isp.push("import os, \\") |
|
253 | 253 | self.assertTrue(isp.push_accepts_more()) |
|
254 | 254 | isp.push("sys") |
|
255 | 255 | self.assertFalse(isp.push_accepts_more()) |
|
256 | 256 | |
|
257 | 257 | def test_syntax_error(self): |
|
258 | 258 | isp = self.isp |
|
259 | 259 | # Syntax errors immediately produce a 'ready' block, so the invalid |
|
260 | 260 | # Python can be sent to the kernel for evaluation with possible ipython |
|
261 | 261 | # special-syntax conversion. |
|
262 | 262 | isp.push('run foo') |
|
263 | 263 | self.assertFalse(isp.push_accepts_more()) |
|
264 | 264 | |
|
265 | 265 | def check_split(self, block_lines, compile=True): |
|
266 | 266 | blocks = assemble(block_lines) |
|
267 | 267 | lines = ''.join(blocks) |
|
268 | 268 | oblock = self.isp.split_blocks(lines) |
|
269 | 269 | self.assertEqual(oblock, blocks) |
|
270 | 270 | if compile: |
|
271 | 271 | for block in blocks: |
|
272 | 272 | self.isp._compile(block) |
|
273 | 273 | |
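check_split exercises InputSplitter.split_blocks(), which takes a single string of source and cuts it into separately compilable sub-blocks. The second entry of all_blocks below corresponds to the direct call sketched here (a restatement of that test data, not new behaviour):

    # Equivalent direct call for the [['x=1'], ['y=2']] case in all_blocks below.
    splitter = isp.InputSplitter()
    assert splitter.split_blocks('x=1\ny=2\n') == ['x=1\n', 'y=2\n']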
|
274 | 274 | def test_split(self): |
|
275 | 275 | # All blocks of input we want to test in a list. The format for each |
|
276 | 276 | # block is a list of lists, with each inner list consisting of all the 
|
277 | 277 | # lines (as single-lines) that should make up a sub-block. |
|
278 | 278 | |
|
279 | 279 | # Note: do NOT put here sub-blocks that don't compile, as the |
|
280 | 280 | # check_split() routine makes a final verification pass to check that |
|
281 | 281 | # each sub_block, as returned by split_blocks(), does compile |
|
282 | 282 | # correctly. |
|
283 | 283 | all_blocks = [ [['x=1']], |
|
284 | 284 | |
|
285 | 285 | [['x=1'], |
|
286 | 286 | ['y=2']], |
|
287 | 287 | |
|
288 | 288 | [['x=1', |
|
289 | 289 | '# a comment'], |
|
290 | 290 | ['y=11']], |
|
291 | 291 | |
|
292 | 292 | [['if 1:', |
|
293 | 293 | ' x=1'], |
|
294 | 294 | ['y=3']], |
|
295 | 295 | |
|
296 | 296 | [['def f(x):', |
|
297 | 297 | ' return x'], |
|
298 | 298 | ['x=1']], |
|
299 | 299 | |
|
300 | 300 | [['def f(x):', |
|
301 | 301 | ' x+=1', |
|
302 | 302 | ' ', |
|
303 | 303 | ' return x'], |
|
304 | 304 | ['x=1']], |
|
305 | 305 | |
|
306 | 306 | [['def f(x):', |
|
307 | 307 | ' if x>0:', |
|
308 | 308 | ' y=1', |
|
309 | 309 | ' # a comment', |
|
310 | 310 | ' else:', |
|
311 | 311 | ' y=4', |
|
312 | 312 | ' ', |
|
313 | 313 | ' return y'], |
|
314 | 314 | ['x=1'], |
|
315 | 315 | ['if 1:', |
|
316 | 316 | ' y=11'] ], |
|
317 | 317 | |
|
318 | 318 | [['for i in range(10):' |
|
319 | 319 | ' x=i**2']], |
|
320 | 320 | |
|
321 | 321 | [['for i in range(10):' |
|
322 | 322 | ' x=i**2'], |
|
323 | 323 | ['z = 1']], |
|
324 | 324 | ] |
|
325 | 325 | for block_lines in all_blocks: |
|
326 | 326 | self.check_split(block_lines) |
|
327 | 327 | |
|
328 | 328 | def test_split_syntax_errors(self): |
|
329 | 329 | # Block splitting with invalid syntax |
|
330 | 330 | all_blocks = [ [['a syntax error']], |
|
331 | 331 | |
|
332 | 332 | [['x=1', |
|
333 | 333 | 'another syntax error']], |
|
334 | 334 | |
|
335 | 335 | [['for i in range(10):' |
|
336 | 336 | ' yet another error']], |
|
337 | 337 | |
|
338 | 338 | ] |
|
339 | 339 | for block_lines in all_blocks: |
|
340 | 340 | self.check_split(block_lines, compile=False) |
|
341 | 341 | |
|
342 | 342 | |
|
343 | 343 | class InteractiveLoopTestCase(unittest.TestCase): |
|
344 | 344 | """Tests for an interactive loop like a python shell. |
|
345 | 345 | """ |
|
346 | 346 | def check_ns(self, lines, ns): |
|
347 | 347 | """Validate that the given input lines produce the resulting namespace. |
|
348 | 348 | |
|
349 | 349 | Note: the input lines are given exactly as they would be typed in an |
|
350 | 350 | auto-indenting environment, as mini_interactive_loop above already does |
|
351 | 351 | auto-indenting and prepends spaces to the input. |
|
352 | 352 | """ |
|
353 | 353 | src = mini_interactive_loop(pseudo_input(lines)) |
|
354 | 354 | test_ns = {} |
|
355 | 355 | exec src in test_ns |
|
356 | 356 | # We can't check that the provided ns is identical to the test_ns, |
|
357 | 357 | # because Python fills test_ns with extra keys (copyright, etc). But |
|
358 | 358 | # we can check that the given dict is *contained* in test_ns |
|
359 | for k,v in ns.items(): | |
|
359 | for k,v in ns.iteritems(): | |
|
360 | 360 | self.assertEqual(test_ns[k], v) |
|
361 | 361 | |
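The only change in this hunk is ns.items() becoming ns.iteritems(), the same lazy-iterator substitution made elsewhere in this changeset: in Python 2 the iter* methods yield pairs on demand instead of first building a throwaway list. A minimal, IPython-independent illustration:

    # Python 2 only: items() copies into a list, iteritems() yields lazily.
    d = {'a': 1, 'b': 2}
    assert isinstance(d.items(), list)   # full list of (key, value) tuples
    for k, v in d.iteritems():           # no intermediate list is built
        print k, v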
|
362 | 362 | def test_simple(self): |
|
363 | 363 | self.check_ns(['x=1'], dict(x=1)) |
|
364 | 364 | |
|
365 | 365 | def test_simple2(self): |
|
366 | 366 | self.check_ns(['if 1:', 'x=2'], dict(x=2)) |
|
367 | 367 | |
|
368 | 368 | def test_xy(self): |
|
369 | 369 | self.check_ns(['x=1; y=2'], dict(x=1, y=2)) |
|
370 | 370 | |
|
371 | 371 | def test_abc(self): |
|
372 | 372 | self.check_ns(['if 1:','a=1','b=2','c=3'], dict(a=1, b=2, c=3)) |
|
373 | 373 | |
|
374 | 374 | def test_multi(self): |
|
375 | 375 | self.check_ns(['x =(1+','1+','2)'], dict(x=4)) |
|
376 | 376 | |
|
377 | 377 | |
|
378 | 378 | def test_LineInfo(): |
|
379 | 379 | """Simple test for LineInfo construction and str()""" |
|
380 | 380 | linfo = isp.LineInfo(' %cd /home') |
|
381 | 381 | nt.assert_equals(str(linfo), 'LineInfo [ |%|cd|/home]') |
|
382 | 382 | |
|
383 | 383 | |
|
384 | 384 | def test_split_user_input(): |
|
385 | 385 | """Unicode test - split_user_input already has good doctests""" |
|
386 | 386 | line = u"Pérez Fernando" |
|
387 | 387 | parts = isp.split_user_input(line) |
|
388 | 388 | parts_expected = (u'', u'', u'', line) |
|
389 | 389 | nt.assert_equal(parts, parts_expected) |
|
390 | 390 | |
|
391 | 391 | |
|
392 | 392 | # Transformer tests |
|
393 | 393 | def transform_checker(tests, func): |
|
394 | 394 | """Utility to loop over test inputs""" |
|
395 | 395 | for inp, tr in tests: |
|
396 | 396 | nt.assert_equals(func(inp), tr) |
|
397 | 397 | |
|
398 | 398 | # Data for all the syntax tests in the form of lists of pairs of |
|
399 | 399 | # raw/transformed input. We store it here as a global dict so that we can use |
|
400 | 400 | # it both within single-function tests and also to validate the behavior of the |
|
401 | 401 | # larger objects |
|
402 | 402 | |
|
403 | 403 | syntax = \ |
|
404 | 404 | dict(assign_system = |
|
405 | 405 | [('a =! ls', 'a = get_ipython().getoutput("ls")'), |
|
406 | 406 | ('b = !ls', 'b = get_ipython().getoutput("ls")'), |
|
407 | 407 | ('x=1', 'x=1'), # normal input is unmodified |
|
408 | 408 | (' ',' '), # blank lines are kept intact |
|
409 | 409 | ], |
|
410 | 410 | |
|
411 | 411 | assign_magic = |
|
412 | 412 | [('a =% who', 'a = get_ipython().magic("who")'), |
|
413 | 413 | ('b = %who', 'b = get_ipython().magic("who")'), |
|
414 | 414 | ('x=1', 'x=1'), # normal input is unmodified |
|
415 | 415 | (' ',' '), # blank lines are kept intact |
|
416 | 416 | ], |
|
417 | 417 | |
|
418 | 418 | classic_prompt = |
|
419 | 419 | [('>>> x=1', 'x=1'), |
|
420 | 420 | ('x=1', 'x=1'), # normal input is unmodified |
|
421 | 421 | (' ', ' '), # blank lines are kept intact |
|
422 | 422 | ('... ', ''), # continuation prompts |
|
423 | 423 | ], |
|
424 | 424 | |
|
425 | 425 | ipy_prompt = |
|
426 | 426 | [('In [1]: x=1', 'x=1'), |
|
427 | 427 | ('x=1', 'x=1'), # normal input is unmodified |
|
428 | 428 | (' ',' '), # blank lines are kept intact |
|
429 | 429 | (' ....: ', ''), # continuation prompts |
|
430 | 430 | ], |
|
431 | 431 | |
|
432 | 432 | # Tests for the escape transformer to leave normal code alone |
|
433 | 433 | escaped_noesc = |
|
434 | 434 | [ (' ', ' '), |
|
435 | 435 | ('x=1', 'x=1'), |
|
436 | 436 | ], |
|
437 | 437 | |
|
438 | 438 | # System calls |
|
439 | 439 | escaped_shell = |
|
440 | 440 | [ ('!ls', 'get_ipython().system("ls")'), |
|
441 | 441 | # Double-escape shell, this means to capture the output of the |
|
442 | 442 | # subprocess and return it |
|
443 | 443 | ('!!ls', 'get_ipython().getoutput("ls")'), |
|
444 | 444 | ], |
|
445 | 445 | |
|
446 | 446 | # Help/object info |
|
447 | 447 | escaped_help = |
|
448 | 448 | [ ('?', 'get_ipython().show_usage()'), |
|
449 | 449 | ('?x1', 'get_ipython().magic("pinfo x1")'), |
|
450 | 450 | ('??x2', 'get_ipython().magic("pinfo2 x2")'), |
|
451 | 451 | ('x3?', 'get_ipython().magic("pinfo x3")'), |
|
452 | 452 | ('x4??', 'get_ipython().magic("pinfo2 x4")'), |
|
453 | 453 | ('%hist?', 'get_ipython().magic("pinfo %hist")'), |
|
454 | 454 | ('f*?', 'get_ipython().magic("psearch f*")'), |
|
455 | 455 | ('ax.*aspe*?', 'get_ipython().magic("psearch ax.*aspe*")'), |
|
456 | 456 | ], |
|
457 | 457 | |
|
458 | 458 | # Explicit magic calls |
|
459 | 459 | escaped_magic = |
|
460 | 460 | [ ('%cd', 'get_ipython().magic("cd")'), |
|
461 | 461 | ('%cd /home', 'get_ipython().magic("cd /home")'), |
|
462 | 462 | (' %magic', ' get_ipython().magic("magic")'), |
|
463 | 463 | ], |
|
464 | 464 | |
|
465 | 465 | # Quoting with separate arguments |
|
466 | 466 | escaped_quote = |
|
467 | 467 | [ (',f', 'f("")'), |
|
468 | 468 | (',f x', 'f("x")'), |
|
469 | 469 | (' ,f y', ' f("y")'), |
|
470 | 470 | (',f a b', 'f("a", "b")'), |
|
471 | 471 | ], |
|
472 | 472 | |
|
473 | 473 | # Quoting with single argument |
|
474 | 474 | escaped_quote2 = |
|
475 | 475 | [ (';f', 'f("")'), |
|
476 | 476 | (';f x', 'f("x")'), |
|
477 | 477 | (' ;f y', ' f("y")'), |
|
478 | 478 | (';f a b', 'f("a b")'), |
|
479 | 479 | ], |
|
480 | 480 | |
|
481 | 481 | # Simply apply parens |
|
482 | 482 | escaped_paren = |
|
483 | 483 | [ ('/f', 'f()'), |
|
484 | 484 | ('/f x', 'f(x)'), |
|
485 | 485 | (' /f y', ' f(y)'), |
|
486 | 486 | ('/f a b', 'f(a, b)'), |
|
487 | 487 | ], |
|
488 | 488 | |
|
489 | 489 | ) |
|
490 | 490 | |
|
491 | 491 | # multiline syntax examples. Each of these should be a list of lists, with |
|
492 | 492 | # each entry itself having pairs of raw/transformed input. The union (with |
|
493 | 493 | # '\n'.join()) of the transformed inputs is what the splitter should produce 
|
494 | 494 | # when fed the raw lines one at a time via push. |
|
495 | 495 | syntax_ml = \ |
|
496 | 496 | dict(classic_prompt = |
|
497 | 497 | [ [('>>> for i in range(10):','for i in range(10):'), |
|
498 | 498 | ('... print i',' print i'), |
|
499 | 499 | ('... ', ''), |
|
500 | 500 | ], |
|
501 | 501 | ], |
|
502 | 502 | |
|
503 | 503 | ipy_prompt = |
|
504 | 504 | [ [('In [24]: for i in range(10):','for i in range(10):'), |
|
505 | 505 | (' ....: print i',' print i'), |
|
506 | 506 | (' ....: ', ''), |
|
507 | 507 | ], |
|
508 | 508 | ], |
|
509 | 509 | ) |
|
510 | 510 | |
|
511 | 511 | |
|
512 | 512 | def test_assign_system(): |
|
513 | 513 | transform_checker(syntax['assign_system'], isp.transform_assign_system) |
|
514 | 514 | |
|
515 | 515 | |
|
516 | 516 | def test_assign_magic(): |
|
517 | 517 | transform_checker(syntax['assign_magic'], isp.transform_assign_magic) |
|
518 | 518 | |
|
519 | 519 | |
|
520 | 520 | def test_classic_prompt(): |
|
521 | 521 | transform_checker(syntax['classic_prompt'], isp.transform_classic_prompt) |
|
522 | 522 | for example in syntax_ml['classic_prompt']: |
|
523 | 523 | transform_checker(example, isp.transform_classic_prompt) |
|
524 | 524 | |
|
525 | 525 | |
|
526 | 526 | def test_ipy_prompt(): |
|
527 | 527 | transform_checker(syntax['ipy_prompt'], isp.transform_ipy_prompt) |
|
528 | 528 | for example in syntax_ml['ipy_prompt']: |
|
529 | 529 | transform_checker(example, isp.transform_ipy_prompt) |
|
530 | 530 | |
|
531 | 531 | |
|
532 | 532 | def test_escaped_noesc(): |
|
533 | 533 | transform_checker(syntax['escaped_noesc'], isp.transform_escaped) |
|
534 | 534 | |
|
535 | 535 | |
|
536 | 536 | def test_escaped_shell(): |
|
537 | 537 | transform_checker(syntax['escaped_shell'], isp.transform_escaped) |
|
538 | 538 | |
|
539 | 539 | |
|
540 | 540 | def test_escaped_help(): |
|
541 | 541 | transform_checker(syntax['escaped_help'], isp.transform_escaped) |
|
542 | 542 | |
|
543 | 543 | |
|
544 | 544 | def test_escaped_magic(): |
|
545 | 545 | transform_checker(syntax['escaped_magic'], isp.transform_escaped) |
|
546 | 546 | |
|
547 | 547 | |
|
548 | 548 | def test_escaped_quote(): |
|
549 | 549 | transform_checker(syntax['escaped_quote'], isp.transform_escaped) |
|
550 | 550 | |
|
551 | 551 | |
|
552 | 552 | def test_escaped_quote2(): |
|
553 | 553 | transform_checker(syntax['escaped_quote2'], isp.transform_escaped) |
|
554 | 554 | |
|
555 | 555 | |
|
556 | 556 | def test_escaped_paren(): |
|
557 | 557 | transform_checker(syntax['escaped_paren'], isp.transform_escaped) |
|
558 | 558 | |
|
559 | 559 | |
|
560 | 560 | class IPythonInputTestCase(InputSplitterTestCase): |
|
561 | 561 | """By just creating a new class whose .isp is a different instance, we |
|
562 | 562 | re-run the same test battery on the new input splitter. |
|
563 | 563 | |
|
564 | 564 | In addition, this runs the tests over the syntax and syntax_ml dicts that |
|
565 | 565 | were tested by individual functions, as part of the OO interface. |
|
566 | 566 | """ |
|
567 | 567 | |
|
568 | 568 | def setUp(self): |
|
569 | 569 | self.isp = isp.IPythonInputSplitter(input_mode='line') |
|
570 | 570 | |
|
571 | 571 | def test_syntax(self): |
|
572 | 572 | """Call all single-line syntax tests from the main object""" |
|
573 | 573 | isp = self.isp |
|
574 | 574 | for example in syntax.itervalues(): |
|
575 | 575 | for raw, out_t in example: |
|
576 | 576 | if raw.startswith(' '): |
|
577 | 577 | continue |
|
578 | 578 | |
|
579 | 579 | isp.push(raw) |
|
580 | 580 | out = isp.source_reset().rstrip() |
|
581 | 581 | self.assertEqual(out, out_t) |
|
582 | 582 | |
|
583 | 583 | def test_syntax_multiline(self): |
|
584 | 584 | isp = self.isp |
|
585 | 585 | for example in syntax_ml.itervalues(): |
|
586 | 586 | out_t_parts = [] |
|
587 | 587 | for line_pairs in example: |
|
588 | 588 | for raw, out_t_part in line_pairs: |
|
589 | 589 | isp.push(raw) |
|
590 | 590 | out_t_parts.append(out_t_part) |
|
591 | 591 | |
|
592 | 592 | out = isp.source_reset().rstrip() |
|
593 | 593 | out_t = '\n'.join(out_t_parts).rstrip() |
|
594 | 594 | self.assertEqual(out, out_t) |
|
595 | 595 | |
|
596 | 596 | |
|
597 | 597 | class BlockIPythonInputTestCase(IPythonInputTestCase): |
|
598 | 598 | |
|
599 | 599 | # Deactivate tests that don't make sense for the block mode |
|
600 | 600 | test_push3 = test_split = lambda s: None |
|
601 | 601 | |
|
602 | 602 | def setUp(self): |
|
603 | 603 | self.isp = isp.IPythonInputSplitter(input_mode='cell') |
|
604 | 604 | |
|
605 | 605 | def test_syntax_multiline(self): |
|
606 | 606 | isp = self.isp |
|
607 | 607 | for example in syntax_ml.itervalues(): |
|
608 | 608 | raw_parts = [] |
|
609 | 609 | out_t_parts = [] |
|
610 | 610 | for line_pairs in example: |
|
611 | 611 | for raw, out_t_part in line_pairs: |
|
612 | 612 | raw_parts.append(raw) |
|
613 | 613 | out_t_parts.append(out_t_part) |
|
614 | 614 | |
|
615 | 615 | raw = '\n'.join(raw_parts) |
|
616 | 616 | out_t = '\n'.join(out_t_parts) |
|
617 | 617 | |
|
618 | 618 | isp.push(raw) |
|
619 | 619 | out = isp.source_reset() |
|
620 | 620 | # Match ignoring trailing whitespace |
|
621 | 621 | self.assertEqual(out.rstrip(), out_t.rstrip()) |
|
622 | 622 | |
|
623 | 623 | |
|
624 | 624 | #----------------------------------------------------------------------------- |
|
625 | 625 | # Main - use as a script, mostly for developer experiments |
|
626 | 626 | #----------------------------------------------------------------------------- |
|
627 | 627 | |
|
628 | 628 | if __name__ == '__main__': |
|
629 | 629 | # A simple demo for interactive experimentation. This code will not get |
|
630 | 630 | # picked up by any test suite. |
|
631 | 631 | from IPython.core.inputsplitter import InputSplitter, IPythonInputSplitter |
|
632 | 632 | |
|
633 | 633 | # configure here the syntax to use, prompt and whether to autoindent |
|
634 | 634 | #isp, start_prompt = InputSplitter(), '>>> ' |
|
635 | 635 | isp, start_prompt = IPythonInputSplitter(), 'In> ' |
|
636 | 636 | |
|
637 | 637 | autoindent = True |
|
638 | 638 | #autoindent = False |
|
639 | 639 | |
|
640 | 640 | try: |
|
641 | 641 | while True: |
|
642 | 642 | prompt = start_prompt |
|
643 | 643 | while isp.push_accepts_more(): |
|
644 | 644 | indent = ' '*isp.indent_spaces |
|
645 | 645 | if autoindent: |
|
646 | 646 | line = indent + raw_input(prompt+indent) |
|
647 | 647 | else: |
|
648 | 648 | line = raw_input(prompt) |
|
649 | 649 | isp.push(line) |
|
650 | 650 | prompt = '... ' |
|
651 | 651 | |
|
652 | 652 | # Here we just return input so we can use it in a test suite, but a |
|
653 | 653 | # real interpreter would instead send it for execution somewhere. |
|
654 | 654 | #src = isp.source; raise EOFError # dbg |
|
655 | 655 | src = isp.source_reset() |
|
656 | 656 | print 'Input source was:\n', src |
|
657 | 657 | except EOFError: |
|
658 | 658 | print 'Bye' |
@@ -1,357 +1,357 b'' | |||
|
1 | 1 | """Tests for various magic functions. |
|
2 | 2 | |
|
3 | 3 | Needs to be run by nose (to make ipython session available). |
|
4 | 4 | """ |
|
5 | 5 | from __future__ import absolute_import |
|
6 | 6 | |
|
7 | 7 | #----------------------------------------------------------------------------- |
|
8 | 8 | # Imports |
|
9 | 9 | #----------------------------------------------------------------------------- |
|
10 | 10 | |
|
11 | 11 | import os |
|
12 | 12 | import sys |
|
13 | 13 | import tempfile |
|
14 | 14 | import types |
|
15 | 15 | from cStringIO import StringIO |
|
16 | 16 | |
|
17 | 17 | import nose.tools as nt |
|
18 | 18 | |
|
19 | 19 | from IPython.utils.path import get_long_path_name |
|
20 | 20 | from IPython.testing import decorators as dec |
|
21 | 21 | from IPython.testing import tools as tt |
|
22 | 22 | |
|
23 | 23 | #----------------------------------------------------------------------------- |
|
24 | 24 | # Test functions begin |
|
25 | 25 | #----------------------------------------------------------------------------- |
|
26 | 26 | def test_rehashx(): |
|
27 | 27 | # clear up everything |
|
28 | 28 | _ip = get_ipython() |
|
29 | 29 | _ip.alias_manager.alias_table.clear() |
|
30 | 30 | del _ip.db['syscmdlist'] |
|
31 | 31 | |
|
32 | 32 | _ip.magic('rehashx') |
|
33 | 33 | # Practically ALL ipython development systems will have more than 10 aliases |
|
34 | 34 | |
|
35 | 35 | yield (nt.assert_true, len(_ip.alias_manager.alias_table) > 10) |
|
36 | for key, val in _ip.alias_manager.alias_table.items(): | |
|
36 | for key, val in _ip.alias_manager.alias_table.iteritems(): | |
|
37 | 37 | # we must strip dots from alias names |
|
38 | 38 | nt.assert_true('.' not in key) |
|
39 | 39 | |
|
40 | 40 | # rehashx must fill up syscmdlist |
|
41 | 41 | scoms = _ip.db['syscmdlist'] |
|
42 | 42 | yield (nt.assert_true, len(scoms) > 10) |
|
43 | 43 | |
|
44 | 44 | |
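test_rehashx above is a nose test generator: each yielded (assert_function, arguments...) tuple is collected by nose and run as its own test case, which is why the assertions are yielded rather than called directly. A stripped-down example of the same pattern:

    # Minimal nose-style generator test (the pattern used in test_rehashx above).
    def test_small_numbers():
        for n in range(3):
            yield nt.assert_true, n < 3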
|
45 | 45 | def test_magic_parse_options(): |
|
46 | 46 | """Test that we don't mangle paths when parsing magic options.""" |
|
47 | 47 | ip = get_ipython() |
|
48 | 48 | path = 'c:\\x' |
|
49 | 49 | opts = ip.parse_options('-f %s' % path,'f:')[0] |
|
50 | 50 | # argv splitting is os-dependent |
|
51 | 51 | if os.name == 'posix': |
|
52 | 52 | expected = 'c:x' |
|
53 | 53 | else: |
|
54 | 54 | expected = path |
|
55 | 55 | nt.assert_equals(opts['f'], expected) |
|
56 | 56 | |
|
57 | 57 | |
|
58 | 58 | def doctest_hist_f(): |
|
59 | 59 | """Test %hist -f with temporary filename. |
|
60 | 60 | |
|
61 | 61 | In [9]: import tempfile |
|
62 | 62 | |
|
63 | 63 | In [10]: tfile = tempfile.mktemp('.py','tmp-ipython-') |
|
64 | 64 | |
|
65 | 65 | In [11]: %hist -n -f $tfile 3 |
|
66 | 66 | |
|
67 | 67 | In [13]: import os; os.unlink(tfile) |
|
68 | 68 | """ |
|
69 | 69 | |
|
70 | 70 | |
|
71 | 71 | def doctest_hist_r(): |
|
72 | 72 | """Test %hist -r |
|
73 | 73 | |
|
74 | 74 | XXX - This test is not recording the output correctly. For some reason, in |
|
75 | 75 | testing mode the raw history isn't getting populated. No idea why. |
|
76 | 76 | Disabling the output checking for now, though at least we do run it. |
|
77 | 77 | |
|
78 | 78 | In [1]: 'hist' in _ip.lsmagic() |
|
79 | 79 | Out[1]: True |
|
80 | 80 | |
|
81 | 81 | In [2]: x=1 |
|
82 | 82 | |
|
83 | 83 | In [3]: %hist -r 2 |
|
84 | 84 | x=1 # random |
|
85 | 85 | %hist -r 2 |
|
86 | 86 | """ |
|
87 | 87 | |
|
88 | 88 | def doctest_hist_op(): |
|
89 | 89 | """Test %hist -op |
|
90 | 90 | |
|
91 | 91 | In [1]: class b: |
|
92 | 92 | ...: pass |
|
93 | 93 | ...: |
|
94 | 94 | |
|
95 | 95 | In [2]: class s(b): |
|
96 | 96 | ...: def __str__(self): |
|
97 | 97 | ...: return 's' |
|
98 | 98 | ...: |
|
99 | 99 | |
|
100 | 100 | In [3]: |
|
101 | 101 | |
|
102 | 102 | In [4]: class r(b): |
|
103 | 103 | ...: def __repr__(self): |
|
104 | 104 | ...: return 'r' |
|
105 | 105 | ...: |
|
106 | 106 | |
|
107 | 107 | In [5]: class sr(s,r): pass |
|
108 | 108 | ...: |
|
109 | 109 | |
|
110 | 110 | In [6]: |
|
111 | 111 | |
|
112 | 112 | In [7]: bb=b() |
|
113 | 113 | |
|
114 | 114 | In [8]: ss=s() |
|
115 | 115 | |
|
116 | 116 | In [9]: rr=r() |
|
117 | 117 | |
|
118 | 118 | In [10]: ssrr=sr() |
|
119 | 119 | |
|
120 | 120 | In [11]: bb |
|
121 | 121 | Out[11]: <...b instance at ...> |
|
122 | 122 | |
|
123 | 123 | In [12]: ss |
|
124 | 124 | Out[12]: <...s instance at ...> |
|
125 | 125 | |
|
126 | 126 | In [13]: |
|
127 | 127 | |
|
128 | 128 | In [14]: %hist -op |
|
129 | 129 | >>> class b: |
|
130 | 130 | ... pass |
|
131 | 131 | ... |
|
132 | 132 | >>> class s(b): |
|
133 | 133 | ... def __str__(self): |
|
134 | 134 | ... return 's' |
|
135 | 135 | ... |
|
136 | 136 | >>> |
|
137 | 137 | >>> class r(b): |
|
138 | 138 | ... def __repr__(self): |
|
139 | 139 | ... return 'r' |
|
140 | 140 | ... |
|
141 | 141 | >>> class sr(s,r): pass |
|
142 | 142 | >>> |
|
143 | 143 | >>> bb=b() |
|
144 | 144 | >>> ss=s() |
|
145 | 145 | >>> rr=r() |
|
146 | 146 | >>> ssrr=sr() |
|
147 | 147 | >>> bb |
|
148 | 148 | <...b instance at ...> |
|
149 | 149 | >>> ss |
|
150 | 150 | <...s instance at ...> |
|
151 | 151 | >>> |
|
152 | 152 | """ |
|
153 | 153 | |
|
154 | 154 | def test_shist(): |
|
155 | 155 | # Simple tests of ShadowHist class - test generator. |
|
156 | 156 | import os, shutil, tempfile |
|
157 | 157 | |
|
158 | 158 | from IPython.utils import pickleshare |
|
159 | 159 | from IPython.core.history import ShadowHist |
|
160 | 160 | |
|
161 | 161 | tfile = tempfile.mktemp('','tmp-ipython-') |
|
162 | 162 | |
|
163 | 163 | db = pickleshare.PickleShareDB(tfile) |
|
164 | 164 | s = ShadowHist(db) |
|
165 | 165 | s.add('hello') |
|
166 | 166 | s.add('world') |
|
167 | 167 | s.add('hello') |
|
168 | 168 | s.add('hello') |
|
169 | 169 | s.add('karhu') |
|
170 | 170 | |
|
171 | 171 | yield nt.assert_equals,s.all(),[(1, 'hello'), (2, 'world'), (3, 'karhu')] |
|
172 | 172 | |
|
173 | 173 | yield nt.assert_equal,s.get(2),'world' |
|
174 | 174 | |
|
175 | 175 | shutil.rmtree(tfile) |
|
176 | 176 | |
|
177 | 177 | |
|
178 | 178 | # XXX failing for now, until we get clearcmd out of quarantine. But we should |
|
179 | 179 | # fix this and revert the skip to happen only if numpy is not around. |
|
180 | 180 | #@dec.skipif_not_numpy |
|
181 | 181 | @dec.skip_known_failure |
|
182 | 182 | def test_numpy_clear_array_undec(): |
|
183 | 183 | from IPython.extensions import clearcmd |
|
184 | 184 | |
|
185 | 185 | _ip.ex('import numpy as np') |
|
186 | 186 | _ip.ex('a = np.empty(2)') |
|
187 | 187 | yield (nt.assert_true, 'a' in _ip.user_ns) |
|
188 | 188 | _ip.magic('clear array') |
|
189 | 189 | yield (nt.assert_false, 'a' in _ip.user_ns) |
|
190 | 190 | |
|
191 | 191 | |
|
192 | 192 | # Multiple tests for clipboard pasting |
|
193 | 193 | @dec.parametric |
|
194 | 194 | def test_paste(): |
|
195 | 195 | _ip = get_ipython() |
|
196 | 196 | def paste(txt, flags='-q'): |
|
197 | 197 | """Paste input text, by default in quiet mode""" |
|
198 | 198 | hooks.clipboard_get = lambda : txt |
|
199 | 199 | _ip.magic('paste '+flags) |
|
200 | 200 | |
|
201 | 201 | # Inject fake clipboard hook but save original so we can restore it later |
|
202 | 202 | hooks = _ip.hooks |
|
203 | 203 | user_ns = _ip.user_ns |
|
204 | 204 | original_clip = hooks.clipboard_get |
|
205 | 205 | |
|
206 | 206 | try: |
|
207 | 207 | # This try/except with an empty except clause is here only because 
|
208 | 208 | # try/yield/finally is invalid syntax in Python 2.4. This will be |
|
209 | 209 | # removed when we drop 2.4-compatibility, and the empty except below 
|
210 | 210 | # will be changed to a finally. |
|
211 | 211 | |
|
212 | 212 | # Run tests with fake clipboard function |
|
213 | 213 | user_ns.pop('x', None) |
|
214 | 214 | paste('x=1') |
|
215 | 215 | yield nt.assert_equal(user_ns['x'], 1) |
|
216 | 216 | |
|
217 | 217 | user_ns.pop('x', None) |
|
218 | 218 | paste('>>> x=2') |
|
219 | 219 | yield nt.assert_equal(user_ns['x'], 2) |
|
220 | 220 | |
|
221 | 221 | paste(""" |
|
222 | 222 | >>> x = [1,2,3] |
|
223 | 223 | >>> y = [] |
|
224 | 224 | >>> for i in x: |
|
225 | 225 | ... y.append(i**2) |
|
226 | 226 | ... |
|
227 | 227 | """) |
|
228 | 228 | yield nt.assert_equal(user_ns['x'], [1,2,3]) |
|
229 | 229 | yield nt.assert_equal(user_ns['y'], [1,4,9]) |
|
230 | 230 | |
|
231 | 231 | # Now, test that paste -r works |
|
232 | 232 | user_ns.pop('x', None) |
|
233 | 233 | yield nt.assert_false('x' in user_ns) |
|
234 | 234 | _ip.magic('paste -r') |
|
235 | 235 | yield nt.assert_equal(user_ns['x'], [1,2,3]) |
|
236 | 236 | |
|
237 | 237 | # Also test paste echoing, by temporarily faking the writer |
|
238 | 238 | w = StringIO() |
|
239 | 239 | writer = _ip.write |
|
240 | 240 | _ip.write = w.write |
|
241 | 241 | code = """ |
|
242 | 242 | a = 100 |
|
243 | 243 | b = 200""" |
|
244 | 244 | try: |
|
245 | 245 | paste(code,'') |
|
246 | 246 | out = w.getvalue() |
|
247 | 247 | finally: |
|
248 | 248 | _ip.write = writer |
|
249 | 249 | yield nt.assert_equal(user_ns['a'], 100) |
|
250 | 250 | yield nt.assert_equal(user_ns['b'], 200) |
|
251 | 251 | yield nt.assert_equal(out, code+"\n## -- End pasted text --\n") |
|
252 | 252 | |
|
253 | 253 | finally: |
|
254 | 254 | # This should be in a finally clause, instead of the bare except above. |
|
255 | 255 | # Restore original hook |
|
256 | 256 | hooks.clipboard_get = original_clip |
|
257 | 257 | |
|
258 | 258 | |
|
259 | 259 | def test_time(): |
|
260 | 260 | _ip.magic('time None') |
|
261 | 261 | |
|
262 | 262 | |
|
263 | 263 | def doctest_time(): |
|
264 | 264 | """ |
|
265 | 265 | In [10]: %time None |
|
266 | 266 | CPU times: user 0.00 s, sys: 0.00 s, total: 0.00 s |
|
267 | 267 | Wall time: 0.00 s |
|
268 | 268 | """ |
|
269 | 269 | |
|
270 | 270 | |
|
271 | 271 | def test_doctest_mode(): |
|
272 | 272 | "Toggle doctest_mode twice, it should be a no-op and run without error" |
|
273 | 273 | _ip.magic('doctest_mode') |
|
274 | 274 | _ip.magic('doctest_mode') |
|
275 | 275 | |
|
276 | 276 | |
|
277 | 277 | def test_parse_options(): |
|
278 | 278 | """Tests for basic options parsing in magics.""" |
|
279 | 279 | # These are only the most minimal of tests, more should be added later. At |
|
280 | 280 | # the very least we check that basic text/unicode calls work OK. |
|
281 | 281 | nt.assert_equal(_ip.parse_options('foo', '')[1], 'foo') |
|
282 | 282 | nt.assert_equal(_ip.parse_options(u'foo', '')[1], u'foo') |
|
283 | 283 | |
|
284 | 284 | |
|
285 | 285 | def test_dirops(): |
|
286 | 286 | """Test various directory handling operations.""" |
|
287 | 287 | curpath = lambda :os.path.splitdrive(os.getcwd())[1].replace('\\','/') |
|
288 | 288 | |
|
289 | 289 | startdir = os.getcwd() |
|
290 | 290 | ipdir = _ip.ipython_dir |
|
291 | 291 | try: |
|
292 | 292 | _ip.magic('cd "%s"' % ipdir) |
|
293 | 293 | nt.assert_equal(curpath(), ipdir) |
|
294 | 294 | _ip.magic('cd -') |
|
295 | 295 | nt.assert_equal(curpath(), startdir) |
|
296 | 296 | _ip.magic('pushd "%s"' % ipdir) |
|
297 | 297 | nt.assert_equal(curpath(), ipdir) |
|
298 | 298 | _ip.magic('popd') |
|
299 | 299 | nt.assert_equal(curpath(), startdir) |
|
300 | 300 | finally: |
|
301 | 301 | os.chdir(startdir) |
|
302 | 302 | |
|
303 | 303 | |
|
304 | 304 | def check_cpaste(code, should_fail=False): |
|
305 | 305 | """Execute code via 'cpaste' and ensure it was executed, unless |
|
306 | 306 | should_fail is set. |
|
307 | 307 | """ |
|
308 | 308 | _ip.user_ns['code_ran'] = False |
|
309 | 309 | |
|
310 | 310 | src = StringIO() |
|
311 | 311 | src.write('\n') |
|
312 | 312 | src.write(code) |
|
313 | 313 | src.write('\n--\n') |
|
314 | 314 | src.seek(0) |
|
315 | 315 | |
|
316 | 316 | stdin_save = sys.stdin |
|
317 | 317 | sys.stdin = src |
|
318 | 318 | |
|
319 | 319 | try: |
|
320 | 320 | _ip.magic('cpaste') |
|
321 | 321 | except: |
|
322 | 322 | if not should_fail: |
|
323 | 323 | raise AssertionError("Failure not expected : '%s'" % |
|
324 | 324 | code) |
|
325 | 325 | else: |
|
326 | 326 | assert _ip.user_ns['code_ran'] |
|
327 | 327 | if should_fail: |
|
328 | 328 | raise AssertionError("Failure expected : '%s'" % code) |
|
329 | 329 | finally: |
|
330 | 330 | sys.stdin = stdin_save |
|
331 | 331 | |
|
332 | 332 | |
|
333 | 333 | def test_cpaste(): |
|
334 | 334 | """Test cpaste magic""" |
|
335 | 335 | |
|
336 | 336 | def run(): |
|
337 | 337 | """Marker function: sets a flag when executed. |
|
338 | 338 | """ |
|
339 | 339 | _ip.user_ns['code_ran'] = True |
|
340 | 340 | return 'run' # return string so '+ run()' doesn't result in success |
|
341 | 341 | |
|
342 | 342 | tests = {'pass': ["> > > run()", |
|
343 | 343 | ">>> > run()", |
|
344 | 344 | "+++ run()", |
|
345 | 345 | "++ run()", |
|
346 | 346 | " >>> run()"], |
|
347 | 347 | |
|
348 | 348 | 'fail': ["+ + run()", |
|
349 | 349 | " ++ run()"]} |
|
350 | 350 | |
|
351 | 351 | _ip.user_ns['run'] = run |
|
352 | 352 | |
|
353 | 353 | for code in tests['pass']: |
|
354 | 354 | check_cpaste(code) |
|
355 | 355 | |
|
356 | 356 | for code in tests['fail']: |
|
357 | 357 | check_cpaste(code, should_fail=True) |
@@ -1,1224 +1,1224 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | ultratb.py -- Spice up your tracebacks! |
|
4 | 4 | |
|
5 | 5 | * ColorTB |
|
6 | 6 | I've always found it a bit hard to visually parse tracebacks in Python. The |
|
7 | 7 | ColorTB class is a solution to that problem. It colors the different parts of a |
|
8 | 8 | traceback in a manner similar to what you would expect from a syntax-highlighting |
|
9 | 9 | text editor. |
|
10 | 10 | |
|
11 | 11 | Installation instructions for ColorTB: |
|
12 | 12 | import sys,ultratb |
|
13 | 13 | sys.excepthook = ultratb.ColorTB() |
|
14 | 14 | |
|
15 | 15 | * VerboseTB |
|
16 | 16 | I've also included a port of Ka-Ping Yee's "cgitb.py" that produces all kinds |
|
17 | 17 | of useful info when a traceback occurs. Ping originally had it spit out HTML |
|
18 | 18 | and intended it for CGI programmers, but why should they have all the fun? I |
|
19 | 19 | altered it to spit out colored text to the terminal. It's a bit overwhelming, |
|
20 | 20 | but kind of neat, and maybe useful for long-running programs that you believe |
|
21 | 21 | are bug-free. If a crash *does* occur in that type of program you want details. |
|
22 | 22 | Give it a shot--you'll love it or you'll hate it. |
|
23 | 23 | |
|
24 | 24 | Note: |
|
25 | 25 | |
|
26 | 26 | The Verbose mode prints the variables currently visible where the exception |
|
27 | 27 | happened (shortening their strings if too long). This can potentially be |
|
28 | 28 | very slow, if you happen to have a huge data structure whose string |
|
29 | 29 | representation is complex to compute. Your computer may appear to freeze for |
|
30 | 30 | a while with cpu usage at 100%. If this occurs, you can cancel the traceback |
|
31 | 31 | with Ctrl-C (maybe hitting it more than once). |
|
32 | 32 | |
|
33 | 33 | If you encounter this kind of situation often, you may want to use the |
|
34 | 34 | Verbose_novars mode instead of the regular Verbose, which avoids formatting |
|
35 | 35 | variables (but otherwise includes the information and context given by |
|
36 | 36 | Verbose). |
|
37 | 37 | |
|
38 | 38 | |
|
39 | 39 | Installation instructions for VerboseTB: 
|
40 | 40 | import sys,ultratb |
|
41 | 41 | sys.excepthook = ultratb.VerboseTB() |
|
42 | 42 | |
|
43 | 43 | Note: Much of the code in this module was lifted verbatim from the standard |
|
44 | 44 | library module 'traceback.py' and Ka-Ping Yee's 'cgitb.py'. |
|
45 | 45 | |
|
46 | 46 | * Color schemes |
|
47 | 47 | The colors are defined in the class TBTools through the use of the |
|
48 | 48 | ColorSchemeTable class. Currently the following exist: |
|
49 | 49 | |
|
50 | 50 | - NoColor: allows all of this module to be used in any terminal (the color |
|
51 | 51 | escapes are just dummy blank strings). |
|
52 | 52 | |
|
53 | 53 | - Linux: is meant to look good in a terminal like the Linux console (black |
|
54 | 54 | or very dark background). |
|
55 | 55 | |
|
56 | 56 | - LightBG: similar to Linux but swaps dark/light colors to be more readable |
|
57 | 57 | in light background terminals. |
|
58 | 58 | |
|
59 | 59 | You can implement other color schemes easily, the syntax is fairly |
|
60 | 60 | self-explanatory. Please send back new schemes you develop to the author for |
|
61 | 61 | possible inclusion in future releases. |
|
62 | 62 | """ |
|
63 | 63 | |
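For completeness, the docstring's install snippets can be combined with one of the color schemes it lists. The sketch below is assumption-laden rather than documented usage: it presumes the module imports as IPython.core.ultratb (its own imports point at IPython.core) and that VerboseTB accepts the color_scheme and call_pdb keywords defined on the TBTools base class later in this file.

    # Hedged sketch; the import path and VerboseTB keyword arguments are assumptions.
    import sys
    from IPython.core import ultratb
    sys.excepthook = ultratb.VerboseTB(color_scheme='Linux', call_pdb=False)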
|
64 | 64 | #***************************************************************************** |
|
65 | 65 | # Copyright (C) 2001 Nathaniel Gray <n8gray@caltech.edu> |
|
66 | 66 | # Copyright (C) 2001-2004 Fernando Perez <fperez@colorado.edu> |
|
67 | 67 | # |
|
68 | 68 | # Distributed under the terms of the BSD License. The full license is in |
|
69 | 69 | # the file COPYING, distributed as part of this software. |
|
70 | 70 | #***************************************************************************** |
|
71 | 71 | |
|
72 | 72 | from __future__ import with_statement |
|
73 | 73 | |
|
74 | 74 | import inspect |
|
75 | 75 | import keyword |
|
76 | 76 | import linecache |
|
77 | 77 | import os |
|
78 | 78 | import pydoc |
|
79 | 79 | import re |
|
80 | 80 | import string |
|
81 | 81 | import sys |
|
82 | 82 | import time |
|
83 | 83 | import tokenize |
|
84 | 84 | import traceback |
|
85 | 85 | import types |
|
86 | 86 | |
|
87 | 87 | # For purposes of monkeypatching inspect to fix a bug in it. |
|
88 | 88 | from inspect import getsourcefile, getfile, getmodule,\ |
|
89 | 89 | ismodule, isclass, ismethod, isfunction, istraceback, isframe, iscode |
|
90 | 90 | |
|
91 | 91 | # IPython's own modules |
|
92 | 92 | # Modified pdb which doesn't damage IPython's readline handling |
|
93 | 93 | from IPython.core import debugger, ipapi |
|
94 | 94 | from IPython.core.display_trap import DisplayTrap |
|
95 | 95 | from IPython.core.excolors import exception_colors |
|
96 | 96 | from IPython.utils import PyColorize |
|
97 | 97 | from IPython.utils import io |
|
98 | 98 | from IPython.utils.data import uniq_stable |
|
99 | 99 | from IPython.utils.warn import info, error |
|
100 | 100 | |
|
101 | 101 | # Globals |
|
102 | 102 | # amount of space to put line numbers before verbose tracebacks |
|
103 | 103 | INDENT_SIZE = 8 |
|
104 | 104 | |
|
105 | 105 | # Default color scheme. This is used, for example, by the traceback |
|
106 | 106 | # formatter. When running in an actual IPython instance, the user's rc.colors |
|
107 | 107 | # value is used, but having a module global makes this functionality available 
|
108 | 108 | # to users of ultratb who are NOT running inside ipython. |
|
109 | 109 | DEFAULT_SCHEME = 'NoColor' |
|
110 | 110 | |
|
111 | 111 | #--------------------------------------------------------------------------- |
|
112 | 112 | # Code begins |
|
113 | 113 | |
|
114 | 114 | # Utility functions |
|
115 | 115 | def inspect_error(): |
|
116 | 116 | """Print a message about internal inspect errors. |
|
117 | 117 | |
|
118 | 118 | These are unfortunately quite common.""" |
|
119 | 119 | |
|
120 | 120 | error('Internal Python error in the inspect module.\n' |
|
121 | 121 | 'Below is the traceback from this internal error.\n') |
|
122 | 122 | |
|
123 | 123 | |
|
124 | 124 | def findsource(object): |
|
125 | 125 | """Return the entire source file and starting line number for an object. |
|
126 | 126 | |
|
127 | 127 | The argument may be a module, class, method, function, traceback, frame, |
|
128 | 128 | or code object. The source code is returned as a list of all the lines |
|
129 | 129 | in the file and the line number indexes a line in that list. An IOError |
|
130 | 130 | is raised if the source code cannot be retrieved. |
|
131 | 131 | |
|
132 | 132 | FIXED version with which we monkeypatch the stdlib to work around a bug.""" |
|
133 | 133 | |
|
134 | 134 | file = getsourcefile(object) or getfile(object) |
|
135 | 135 | # If the object is a frame, then trying to get the globals dict from its |
|
136 | 136 | # module won't work. Instead, the frame object itself has the globals |
|
137 | 137 | # dictionary. |
|
138 | 138 | globals_dict = None |
|
139 | 139 | if inspect.isframe(object): |
|
140 | 140 | # XXX: can this ever be false? |
|
141 | 141 | globals_dict = object.f_globals |
|
142 | 142 | else: |
|
143 | 143 | module = getmodule(object, file) |
|
144 | 144 | if module: |
|
145 | 145 | globals_dict = module.__dict__ |
|
146 | 146 | lines = linecache.getlines(file, globals_dict) |
|
147 | 147 | if not lines: |
|
148 | 148 | raise IOError('could not get source code') |
|
149 | 149 | |
|
150 | 150 | if ismodule(object): |
|
151 | 151 | return lines, 0 |
|
152 | 152 | |
|
153 | 153 | if isclass(object): |
|
154 | 154 | name = object.__name__ |
|
155 | 155 | pat = re.compile(r'^(\s*)class\s*' + name + r'\b') |
|
156 | 156 | # make some effort to find the best matching class definition: |
|
157 | 157 | # use the one with the least indentation, which is the one |
|
158 | 158 | # that's most probably not inside a function definition. |
|
159 | 159 | candidates = [] |
|
160 | 160 | for i in range(len(lines)): |
|
161 | 161 | match = pat.match(lines[i]) |
|
162 | 162 | if match: |
|
163 | 163 | # if it's at toplevel, it's already the best one |
|
164 | 164 | if lines[i][0] == 'c': |
|
165 | 165 | return lines, i |
|
166 | 166 | # else add whitespace to candidate list |
|
167 | 167 | candidates.append((match.group(1), i)) |
|
168 | 168 | if candidates: |
|
169 | 169 | # this will sort by whitespace, and by line number, |
|
170 | 170 | # less whitespace first |
|
171 | 171 | candidates.sort() |
|
172 | 172 | return lines, candidates[0][1] |
|
173 | 173 | else: |
|
174 | 174 | raise IOError('could not find class definition') |
|
175 | 175 | |
|
176 | 176 | if ismethod(object): |
|
177 | 177 | object = object.im_func |
|
178 | 178 | if isfunction(object): |
|
179 | 179 | object = object.func_code |
|
180 | 180 | if istraceback(object): |
|
181 | 181 | object = object.tb_frame |
|
182 | 182 | if isframe(object): |
|
183 | 183 | object = object.f_code |
|
184 | 184 | if iscode(object): |
|
185 | 185 | if not hasattr(object, 'co_firstlineno'): |
|
186 | 186 | raise IOError('could not find function definition') |
|
187 | 187 | pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)') |
|
188 | 188 | pmatch = pat.match |
|
189 | 189 | # fperez - fix: sometimes, co_firstlineno can give a number larger than |
|
190 | 190 | # the length of lines, which causes an error. Safeguard against that. |
|
191 | 191 | lnum = min(object.co_firstlineno,len(lines))-1 |
|
192 | 192 | while lnum > 0: |
|
193 | 193 | if pmatch(lines[lnum]): break |
|
194 | 194 | lnum -= 1 |
|
195 | 195 | |
|
196 | 196 | return lines, lnum |
|
197 | 197 | raise IOError('could not find code object') |
|
198 | 198 | |
|
199 | 199 | # Monkeypatch inspect to apply our bugfix. This code only works with py25 |
|
200 | 200 | if sys.version_info[:2] >= (2,5): |
|
201 | 201 | inspect.findsource = findsource |
|
202 | 202 | |
|
203 | 203 | def fix_frame_records_filenames(records): |
|
204 | 204 | """Try to fix the filenames in each record from inspect.getinnerframes(). |
|
205 | 205 | |
|
206 | 206 | Particularly, modules loaded from within zip files have useless filenames |
|
207 | 207 | attached to their code object, and inspect.getinnerframes() just uses it. |
|
208 | 208 | """ |
|
209 | 209 | fixed_records = [] |
|
210 | 210 | for frame, filename, line_no, func_name, lines, index in records: |
|
211 | 211 | # Look inside the frame's globals dictionary for __file__, which should |
|
212 | 212 | # be better. |
|
213 | 213 | better_fn = frame.f_globals.get('__file__', None) |
|
214 | 214 | if isinstance(better_fn, str): |
|
215 | 215 | # Check the type just in case someone did something weird with |
|
216 | 216 | # __file__. It might also be None if the error occurred during |
|
217 | 217 | # import. |
|
218 | 218 | filename = better_fn |
|
219 | 219 | fixed_records.append((frame, filename, line_no, func_name, lines, index)) |
|
220 | 220 | return fixed_records |
|
221 | 221 | |
|
222 | 222 | |
|
223 | 223 | def _fixed_getinnerframes(etb, context=1,tb_offset=0): |
|
224 | 224 | import linecache |
|
225 | 225 | LNUM_POS, LINES_POS, INDEX_POS = 2, 4, 5 |
|
226 | 226 | |
|
227 | 227 | records = fix_frame_records_filenames(inspect.getinnerframes(etb, context)) |
|
228 | 228 | |
|
229 | 229 | # If the error is at the console, don't build any context, since it would |
|
230 | 230 | # otherwise produce 5 blank lines printed out (there is no file at the |
|
231 | 231 | # console) |
|
232 | 232 | rec_check = records[tb_offset:] |
|
233 | 233 | try: |
|
234 | 234 | rname = rec_check[0][1] |
|
235 | 235 | if rname == '<ipython console>' or rname.endswith('<string>'): |
|
236 | 236 | return rec_check |
|
237 | 237 | except IndexError: |
|
238 | 238 | pass |
|
239 | 239 | |
|
240 | 240 | aux = traceback.extract_tb(etb) |
|
241 | 241 | assert len(records) == len(aux) |
|
242 | 242 | for i, (file, lnum, _, _) in zip(range(len(records)), aux): |
|
243 | 243 | maybeStart = lnum-1 - context//2 |
|
244 | 244 | start = max(maybeStart, 0) |
|
245 | 245 | end = start + context |
|
246 | 246 | lines = linecache.getlines(file)[start:end] |
|
247 | 247 | # pad with empty lines if necessary |
|
248 | 248 | if maybeStart < 0: |
|
249 | 249 | lines = (['\n'] * -maybeStart) + lines |
|
250 | 250 | if len(lines) < context: |
|
251 | 251 | lines += ['\n'] * (context - len(lines)) |
|
252 | 252 | buf = list(records[i]) |
|
253 | 253 | buf[LNUM_POS] = lnum |
|
254 | 254 | buf[INDEX_POS] = lnum - 1 - start |
|
255 | 255 | buf[LINES_POS] = lines |
|
256 | 256 | records[i] = tuple(buf) |
|
257 | 257 | return records[tb_offset:] |
|
258 | 258 | |
|
259 | 259 | # Helper function -- largely belongs to VerboseTB, but we need the same |
|
260 | 260 | # functionality to produce a pseudo verbose TB for SyntaxErrors, so that they |
|
261 | 261 | # can be recognized properly by ipython.el's py-traceback-line-re |
|
262 | 262 | # (SyntaxErrors have to be treated specially because they have no traceback) |
|
263 | 263 | |
|
264 | 264 | _parser = PyColorize.Parser() |
|
265 | 265 | |
|
266 | 266 | def _format_traceback_lines(lnum, index, lines, Colors, lvals=None,scheme=None): |
|
267 | 267 | numbers_width = INDENT_SIZE - 1 |
|
268 | 268 | res = [] |
|
269 | 269 | i = lnum - index |
|
270 | 270 | |
|
271 | 271 | # This lets us get fully syntax-highlighted tracebacks. |
|
272 | 272 | if scheme is None: |
|
273 | 273 | ipinst = ipapi.get() |
|
274 | 274 | if ipinst is not None: |
|
275 | 275 | scheme = ipinst.colors |
|
276 | 276 | else: |
|
277 | 277 | scheme = DEFAULT_SCHEME |
|
278 | 278 | |
|
279 | 279 | _line_format = _parser.format2 |
|
280 | 280 | |
|
281 | 281 | for line in lines: |
|
282 | 282 | new_line, err = _line_format(line,'str',scheme) |
|
283 | 283 | if not err: line = new_line |
|
284 | 284 | |
|
285 | 285 | if i == lnum: |
|
286 | 286 | # This is the line with the error |
|
287 | 287 | pad = numbers_width - len(str(i)) |
|
288 | 288 | if pad >= 3: |
|
289 | 289 | marker = '-'*(pad-3) + '-> ' |
|
290 | 290 | elif pad == 2: |
|
291 | 291 | marker = '> ' |
|
292 | 292 | elif pad == 1: |
|
293 | 293 | marker = '>' |
|
294 | 294 | else: |
|
295 | 295 | marker = '' |
|
296 | 296 | num = marker + str(i) |
|
297 | 297 | line = '%s%s%s %s%s' %(Colors.linenoEm, num, |
|
298 | 298 | Colors.line, line, Colors.Normal) |
|
299 | 299 | else: |
|
300 | 300 | num = '%*s' % (numbers_width,i) |
|
301 | 301 | line = '%s%s%s %s' %(Colors.lineno, num, |
|
302 | 302 | Colors.Normal, line) |
|
303 | 303 | |
|
304 | 304 | res.append(line) |
|
305 | 305 | if lvals and i == lnum: |
|
306 | 306 | res.append(lvals + '\n') |
|
307 | 307 | i = i + 1 |
|
308 | 308 | return res |
|
309 | 309 | |
|
310 | 310 | |
|
311 | 311 | #--------------------------------------------------------------------------- |
|
312 | 312 | # Module classes |
|
313 | 313 | class TBTools(object): |
|
314 | 314 | """Basic tools used by all traceback printer classes.""" |
|
315 | 315 | |
|
316 | 316 | # Number of frames to skip when reporting tracebacks |
|
317 | 317 | tb_offset = 0 |
|
318 | 318 | |
|
319 | 319 | def __init__(self, color_scheme='NoColor', call_pdb=False, ostream=None): |
|
320 | 320 | # Whether to call the interactive pdb debugger after printing |
|
321 | 321 | # tracebacks or not |
|
322 | 322 | self.call_pdb = call_pdb |
|
323 | 323 | |
|
324 | 324 | # Output stream to write to. Note that we store the original value in |
|
325 | 325 | # a private attribute and then make the public ostream a property, so |
|
326 | 326 | # that we can delay accessing io.Term.cout until runtime. The way |
|
327 | 327 | # things are written now, the Term.cout object is dynamically managed |
|
328 | 328 | # so a reference to it should NEVER be stored statically. This |
|
329 | 329 | # property approach confines this detail to a single location, and all |
|
330 | 330 | # subclasses can simply access self.ostream for writing. |
|
331 | 331 | self._ostream = ostream |
|
332 | 332 | |
|
333 | 333 | # Create color table |
|
334 | 334 | self.color_scheme_table = exception_colors() |
|
335 | 335 | |
|
336 | 336 | self.set_colors(color_scheme) |
|
337 | 337 | self.old_scheme = color_scheme # save initial value for toggles |
|
338 | 338 | |
|
339 | 339 | if call_pdb: |
|
340 | 340 | self.pdb = debugger.Pdb(self.color_scheme_table.active_scheme_name) |
|
341 | 341 | else: |
|
342 | 342 | self.pdb = None |
|
343 | 343 | |
|
344 | 344 | def _get_ostream(self): |
|
345 | 345 | """Output stream that exceptions are written to. |
|
346 | 346 | |
|
347 | 347 | Valid values are: |
|
348 | 348 | |
|
349 | 349 | - None: the default, which means that IPython will dynamically resolve |
|
350 | 350 | to io.Term.cout. This ensures compatibility with most tools, including |
|
351 | 351 | Windows (where plain stdout doesn't recognize ANSI escapes). |
|
352 | 352 | |
|
353 | 353 | - Any object with 'write' and 'flush' attributes. |
|
354 | 354 | """ |
|
355 | 355 | return io.Term.cout if self._ostream is None else self._ostream |
|
356 | 356 | |
|
357 | 357 | def _set_ostream(self, val): |
|
358 | 358 | assert val is None or (hasattr(val, 'write') and hasattr(val, 'flush')) |
|
359 | 359 | self._ostream = val |
|
360 | 360 | |
|
361 | 361 | ostream = property(_get_ostream, _set_ostream) |
|
362 | 362 | |
|
363 | 363 | def set_colors(self,*args,**kw): |
|
364 | 364 | """Shorthand access to the color table scheme selector method.""" |
|
365 | 365 | |
|
366 | 366 | # Set own color table |
|
367 | 367 | self.color_scheme_table.set_active_scheme(*args,**kw) |
|
368 | 368 | # for convenience, set Colors to the active scheme |
|
369 | 369 | self.Colors = self.color_scheme_table.active_colors |
|
370 | 370 | # Also set colors of debugger |
|
371 | 371 | if hasattr(self,'pdb') and self.pdb is not None: |
|
372 | 372 | self.pdb.set_colors(*args,**kw) |
|
373 | 373 | |
|
374 | 374 | def color_toggle(self): |
|
375 | 375 | """Toggle between the currently active color scheme and NoColor.""" |
|
376 | 376 | |
|
377 | 377 | if self.color_scheme_table.active_scheme_name == 'NoColor': |
|
378 | 378 | self.color_scheme_table.set_active_scheme(self.old_scheme) |
|
379 | 379 | self.Colors = self.color_scheme_table.active_colors |
|
380 | 380 | else: |
|
381 | 381 | self.old_scheme = self.color_scheme_table.active_scheme_name |
|
382 | 382 | self.color_scheme_table.set_active_scheme('NoColor') |
|
383 | 383 | self.Colors = self.color_scheme_table.active_colors |
|
384 | 384 | |
|
385 | 385 | def stb2text(self, stb): |
|
386 | 386 | """Convert a structured traceback (a list) to a string.""" |
|
387 | 387 | return '\n'.join(stb) |
|
388 | 388 | |
|
389 | 389 | def text(self, etype, value, tb, tb_offset=None, context=5): |
|
390 | 390 | """Return formatted traceback. |
|
391 | 391 | |
|
392 | 392 | Subclasses may override this if they add extra arguments. |
|
393 | 393 | """ |
|
394 | 394 | tb_list = self.structured_traceback(etype, value, tb, |
|
395 | 395 | tb_offset, context) |
|
396 | 396 | return self.stb2text(tb_list) |
|
397 | 397 | |
|
398 | 398 | def structured_traceback(self, etype, evalue, tb, tb_offset=None, |
|
399 | 399 | context=5, mode=None): |
|
400 | 400 | """Return a list of traceback frames. |
|
401 | 401 | |
|
402 | 402 | Must be implemented by each class. |
|
403 | 403 | """ |
|
404 | 404 | raise NotImplementedError() |
|
405 | 405 | |
|
406 | 406 | |
|
407 | 407 | #--------------------------------------------------------------------------- |
|
408 | 408 | class ListTB(TBTools): |
|
409 | 409 | """Print traceback information from a traceback list, with optional color. |
|
410 | 410 | |
|
411 | 411 | Calling: requires 3 arguments: |
|
412 | 412 | (etype, evalue, elist) |
|
413 | 413 | as would be obtained by: |
|
414 | 414 | etype, evalue, tb = sys.exc_info() |
|
415 | 415 | if tb: |
|
416 | 416 | elist = traceback.extract_tb(tb) |
|
417 | 417 | else: |
|
418 | 418 | elist = None |
|
419 | 419 | |
|
420 | 420 | It can thus be used by programs which need to process the traceback before |
|
421 | 421 | printing (such as console replacements based on the code module from the |
|
422 | 422 | standard library). |
|
423 | 423 | |
|
424 | 424 | Because they are meant to be called without a full traceback (only a |
|
425 | 425 | list), instances of this class can't call the interactive pdb debugger.""" |
|
426 | 426 | |
|
427 | 427 | def __init__(self,color_scheme = 'NoColor', call_pdb=False, ostream=None): |
|
428 | 428 | TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, |
|
429 | 429 | ostream=ostream) |
|
430 | 430 | |
|
431 | 431 | def __call__(self, etype, value, elist): |
|
432 | 432 | self.ostream.flush() |
|
433 | 433 | self.ostream.write(self.text(etype, value, elist)) |
|
434 | 434 | self.ostream.write('\n') |
|
435 | 435 | |
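A short sketch of the calling convention spelled out in the class docstring: hand ListTB the exception type, the value, and the already-extracted frame list. The import path is assumed as above, and output goes to the default ostream:

    import sys, traceback
    from IPython.core.ultratb import ListTB   # assumed module path

    printer = ListTB(color_scheme='NoColor')
    try:
        {}['missing']
    except KeyError:
        etype, evalue, tb = sys.exc_info()
        elist = traceback.extract_tb(tb) if tb else None
        printer(etype, evalue, elist)   # formats the list and writes it to printer.ostream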
|
436 | 436 | def structured_traceback(self, etype, value, elist, tb_offset=None, |
|
437 | 437 | context=5): |
|
438 | 438 | """Return a color formatted string with the traceback info. |
|
439 | 439 | |
|
440 | 440 | Parameters |
|
441 | 441 | ---------- |
|
442 | 442 | etype : exception type |
|
443 | 443 | Type of the exception raised. |
|
444 | 444 | |
|
445 | 445 | value : object |
|
446 | 446 | Data stored in the exception |
|
447 | 447 | |
|
448 | 448 | elist : list |
|
449 | 449 | List of frames, see class docstring for details. |
|
450 | 450 | |
|
451 | 451 | tb_offset : int, optional |
|
452 | 452 | Number of frames in the traceback to skip. If not given, the |
|
453 | 453 | instance value is used (set in constructor). |
|
454 | 454 | |
|
455 | 455 | context : int, optional |
|
456 | 456 | Number of lines of context information to print. |
|
457 | 457 | |
|
458 | 458 | Returns |
|
459 | 459 | ------- |
|
460 | 460 | String with formatted exception. |
|
461 | 461 | """ |
|
462 | 462 | tb_offset = self.tb_offset if tb_offset is None else tb_offset |
|
463 | 463 | Colors = self.Colors |
|
464 | 464 | out_list = [] |
|
465 | 465 | if elist: |
|
466 | 466 | |
|
467 | 467 | if tb_offset and len(elist) > tb_offset: |
|
468 | 468 | elist = elist[tb_offset:] |
|
469 | 469 | |
|
470 | 470 | out_list.append('Traceback %s(most recent call last)%s:' % |
|
471 | 471 | (Colors.normalEm, Colors.Normal) + '\n') |
|
472 | 472 | out_list.extend(self._format_list(elist)) |
|
473 | 473 | # The exception info should be a single entry in the list. |
|
474 | 474 | lines = ''.join(self._format_exception_only(etype, value)) |
|
475 | 475 | out_list.append(lines) |
|
476 | 476 | |
|
477 | 477 | # Note: this code originally read: |
|
478 | 478 | |
|
479 | 479 | ## for line in lines[:-1]: |
|
480 | 480 | ## out_list.append(" "+line) |
|
481 | 481 | ## out_list.append(lines[-1]) |
|
482 | 482 | |
|
483 | 483 | # This means it was indenting everything but the last line by a little |
|
484 | 484 | # bit. I've disabled this for now, but if we see ugliness somewhere we |
|
485 | 485 | # can restore it. |
|
486 | 486 | |
|
487 | 487 | return out_list |
|
488 | 488 | |
|
489 | 489 | def _format_list(self, extracted_list): |
|
490 | 490 | """Format a list of traceback entry tuples for printing. |
|
491 | 491 | |
|
492 | 492 | Given a list of tuples as returned by extract_tb() or |
|
493 | 493 | extract_stack(), return a list of strings ready for printing. |
|
494 | 494 | Each string in the resulting list corresponds to the item with the |
|
495 | 495 | same index in the argument list. Each string ends in a newline; |
|
496 | 496 | the strings may contain internal newlines as well, for those items |
|
497 | 497 | whose source text line is not None. |
|
498 | 498 | |
|
499 | 499 | Lifted almost verbatim from traceback.py |
|
500 | 500 | """ |
|
501 | 501 | |
|
502 | 502 | Colors = self.Colors |
|
503 | 503 | list = [] |
|
504 | 504 | for filename, lineno, name, line in extracted_list[:-1]: |
|
505 | 505 | item = ' File %s"%s"%s, line %s%d%s, in %s%s%s\n' % \ |
|
506 | 506 | (Colors.filename, filename, Colors.Normal, |
|
507 | 507 | Colors.lineno, lineno, Colors.Normal, |
|
508 | 508 | Colors.name, name, Colors.Normal) |
|
509 | 509 | if line: |
|
510 | 510 | item = item + ' %s\n' % line.strip() |
|
511 | 511 | list.append(item) |
|
512 | 512 | # Emphasize the last entry |
|
513 | 513 | filename, lineno, name, line = extracted_list[-1] |
|
514 | 514 | item = '%s File %s"%s"%s, line %s%d%s, in %s%s%s%s\n' % \ |
|
515 | 515 | (Colors.normalEm, |
|
516 | 516 | Colors.filenameEm, filename, Colors.normalEm, |
|
517 | 517 | Colors.linenoEm, lineno, Colors.normalEm, |
|
518 | 518 | Colors.nameEm, name, Colors.normalEm, |
|
519 | 519 | Colors.Normal) |
|
520 | 520 | if line: |
|
521 | 521 | item = item + '%s %s%s\n' % (Colors.line, line.strip(), |
|
522 | 522 | Colors.Normal) |
|
523 | 523 | list.append(item) |
|
524 | 524 | #from pprint import pformat; print 'LISTTB', pformat(list) # dbg |
|
525 | 525 | return list |
|
526 | 526 | |
|
527 | 527 | def _format_exception_only(self, etype, value): |
|
528 | 528 | """Format the exception part of a traceback. |
|
529 | 529 | |
|
530 | 530 | The arguments are the exception type and value such as given by |
|
531 | 531 | sys.exc_info()[:2]. The return value is a list of strings, each ending |
|
532 | 532 | in a newline. Normally, the list contains a single string; however, |
|
533 | 533 | for SyntaxError exceptions, it contains several lines that (when |
|
534 | 534 | printed) display detailed information about where the syntax error |
|
535 | 535 | occurred. The message indicating which exception occurred is |

536 | 536 | always the last string in the list. |
|
537 | 537 | |
|
538 | 538 | Also lifted nearly verbatim from traceback.py |
|
539 | 539 | """ |
|
540 | 540 | |
|
541 | 541 | have_filedata = False |
|
542 | 542 | Colors = self.Colors |
|
543 | 543 | list = [] |
|
544 | 544 | try: |
|
545 | 545 | stype = Colors.excName + etype.__name__ + Colors.Normal |
|
546 | 546 | except AttributeError: |
|
547 | 547 | stype = etype # String exceptions don't get special coloring |
|
548 | 548 | if value is None: |
|
549 | 549 | list.append( str(stype) + '\n') |
|
550 | 550 | else: |
|
551 | 551 | if etype is SyntaxError: |
|
552 | 552 | try: |
|
553 | 553 | msg, (filename, lineno, offset, line) = value |
|
554 | 554 | except: |
|
555 | 555 | have_filedata = False |
|
556 | 556 | else: |
|
557 | 557 | have_filedata = True |
|
558 | 558 | #print 'filename is',filename # dbg |
|
559 | 559 | if not filename: filename = "<string>" |
|
560 | 560 | list.append('%s File %s"%s"%s, line %s%d%s\n' % \ |
|
561 | 561 | (Colors.normalEm, |
|
562 | 562 | Colors.filenameEm, filename, Colors.normalEm, |
|
563 | 563 | Colors.linenoEm, lineno, Colors.Normal )) |
|
564 | 564 | if line is not None: |
|
565 | 565 | i = 0 |
|
566 | 566 | while i < len(line) and line[i].isspace(): |
|
567 | 567 | i = i+1 |
|
568 | 568 | list.append('%s %s%s\n' % (Colors.line, |
|
569 | 569 | line.strip(), |
|
570 | 570 | Colors.Normal)) |
|
571 | 571 | if offset is not None: |
|
572 | 572 | s = ' ' |
|
573 | 573 | for c in line[i:offset-1]: |
|
574 | 574 | if c.isspace(): |
|
575 | 575 | s = s + c |
|
576 | 576 | else: |
|
577 | 577 | s = s + ' ' |
|
578 | 578 | list.append('%s%s^%s\n' % (Colors.caret, s, |
|
579 | 579 | Colors.Normal) ) |
|
580 | 580 | value = msg |
|
581 | 581 | s = self._some_str(value) |
|
582 | 582 | if s: |
|
583 | 583 | list.append('%s%s:%s %s\n' % (str(stype), Colors.excName, |
|
584 | 584 | Colors.Normal, s)) |
|
585 | 585 | else: |
|
586 | 586 | list.append('%s\n' % str(stype)) |
|
587 | 587 | |
|
588 | 588 | # sync with user hooks |
|
589 | 589 | if have_filedata: |
|
590 | 590 | ipinst = ipapi.get() |
|
591 | 591 | if ipinst is not None: |
|
592 | 592 | ipinst.hooks.synchronize_with_editor(filename, lineno, 0) |
|
593 | 593 | |
|
594 | 594 | return list |
|
595 | 595 | |
|
596 | 596 | def get_exception_only(self, etype, value): |
|
597 | 597 | """Only print the exception type and message, without a traceback. |
|
598 | 598 | |
|
599 | 599 | Parameters |
|
600 | 600 | ---------- |
|
601 | 601 | etype : exception type |
|
602 | 602 | value : exception value |
|
603 | 603 | """ |
|
604 | 604 | return ListTB.structured_traceback(self, etype, value, []) |
|
605 | 605 | |
|
606 | 606 | |
|
607 | 607 | def show_exception_only(self, etype, evalue): |
|
608 | 608 | """Only print the exception type and message, without a traceback. |
|
609 | 609 | |
|
610 | 610 | Parameters |
|
611 | 611 | ---------- |
|
612 | 612 | etype : exception type |
|
613 | 613 | value : exception value |
|
614 | 614 | """ |
|
615 | 615 | # This method needs to use __call__ from *this* class, not the one from |
|
616 | 616 | # a subclass whose signature or behavior may be different |
|
617 | 617 | ostream = self.ostream |
|
618 | 618 | ostream.flush() |
|
619 | 619 | ostream.write('\n'.join(self.get_exception_only(etype, evalue))) |
|
620 | 620 | ostream.flush() |
|
621 | 621 | |
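A sketch of the message-only path, useful when the frame list would just be noise (same assumed import path; NoColor keeps the output free of ANSI escapes):

    import sys
    from IPython.core.ultratb import ListTB   # assumed module path

    printer = ListTB(color_scheme='NoColor')
    try:
        int('not a number')
    except ValueError:
        etype, evalue = sys.exc_info()[:2]
        printer.show_exception_only(etype, evalue)   # prints just "ValueError: ..."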
|
622 | 622 | def _some_str(self, value): |
|
623 | 623 | # Lifted from traceback.py |
|
624 | 624 | try: |
|
625 | 625 | return str(value) |
|
626 | 626 | except: |
|
627 | 627 | return '<unprintable %s object>' % type(value).__name__ |
|
628 | 628 | |
|
629 | 629 | #---------------------------------------------------------------------------- |
|
630 | 630 | class VerboseTB(TBTools): |
|
631 | 631 | """A port of Ka-Ping Yee's cgitb.py module that outputs color text instead |
|
632 | 632 | of HTML. Requires inspect and pydoc. Crazy, man. |
|
633 | 633 | |
|
634 | 634 | Modified version which optionally strips the topmost entries from the |
|
635 | 635 | traceback, to be used with alternate interpreters (because their own code |
|
636 | 636 | would appear in the traceback).""" |
|
637 | 637 | |
|
638 | 638 | def __init__(self,color_scheme = 'Linux', call_pdb=False, ostream=None, |
|
639 | 639 | tb_offset=0, long_header=False, include_vars=True): |
|
640 | 640 | """Specify traceback offset, headers and color scheme. |
|
641 | 641 | |
|
642 | 642 | Define how many frames to drop from the tracebacks. Calling it with |
|
643 | 643 | tb_offset=1 allows use of this handler in interpreters which will have |
|
644 | 644 | their own code at the top of the traceback (VerboseTB will first |
|
645 | 645 | remove that frame before printing the traceback info).""" |
|
646 | 646 | TBTools.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, |
|
647 | 647 | ostream=ostream) |
|
648 | 648 | self.tb_offset = tb_offset |
|
649 | 649 | self.long_header = long_header |
|
650 | 650 | self.include_vars = include_vars |
|
651 | 651 | |
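A sketch of the offset behaviour described in the constructor docstring: with tb_offset=1 the reporting wrapper's own frame is dropped before the verbose listing is built (import path assumed as before; Python 2 exec syntax to match the module):

    from IPython.core.ultratb import VerboseTB   # assumed module path

    vtb = VerboseTB(color_scheme='NoColor', tb_offset=1, include_vars=False)

    def run_user_code(src):
        try:
            exec src in {}
        except:
            vtb()   # run_user_code's own frame is skipped because tb_offset == 1

    run_user_code("1/0")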
|
652 | 652 | def structured_traceback(self, etype, evalue, etb, tb_offset=None, |
|
653 | 653 | context=5): |
|
654 | 654 | """Return a nice text document describing the traceback.""" |
|
655 | 655 | |
|
656 | 656 | tb_offset = self.tb_offset if tb_offset is None else tb_offset |
|
657 | 657 | |
|
658 | 658 | # some locals |
|
659 | 659 | try: |
|
660 | 660 | etype = etype.__name__ |
|
661 | 661 | except AttributeError: |
|
662 | 662 | pass |
|
663 | 663 | Colors = self.Colors # just a shorthand + quicker name lookup |
|
664 | 664 | ColorsNormal = Colors.Normal # used a lot |
|
665 | 665 | col_scheme = self.color_scheme_table.active_scheme_name |
|
666 | 666 | indent = ' '*INDENT_SIZE |
|
667 | 667 | em_normal = '%s\n%s%s' % (Colors.valEm, indent,ColorsNormal) |
|
668 | 668 | undefined = '%sundefined%s' % (Colors.em, ColorsNormal) |
|
669 | 669 | exc = '%s%s%s' % (Colors.excName,etype,ColorsNormal) |
|
670 | 670 | |
|
671 | 671 | # some internal-use functions |
|
672 | 672 | def text_repr(value): |
|
673 | 673 | """Hopefully pretty robust repr equivalent.""" |
|
674 | 674 | # this is pretty horrible but should always return *something* |
|
675 | 675 | try: |
|
676 | 676 | return pydoc.text.repr(value) |
|
677 | 677 | except KeyboardInterrupt: |
|
678 | 678 | raise |
|
679 | 679 | except: |
|
680 | 680 | try: |
|
681 | 681 | return repr(value) |
|
682 | 682 | except KeyboardInterrupt: |
|
683 | 683 | raise |
|
684 | 684 | except: |
|
685 | 685 | try: |
|
686 | 686 | # all still in an except block so we catch |
|
687 | 687 | # getattr raising |
|
688 | 688 | name = getattr(value, '__name__', None) |
|
689 | 689 | if name: |
|
690 | 690 | # ick, recursion |
|
691 | 691 | return text_repr(name) |
|
692 | 692 | klass = getattr(value, '__class__', None) |
|
693 | 693 | if klass: |
|
694 | 694 | return '%s instance' % text_repr(klass) |
|
695 | 695 | except KeyboardInterrupt: |
|
696 | 696 | raise |
|
697 | 697 | except: |
|
698 | 698 | return 'UNRECOVERABLE REPR FAILURE' |
|
699 | 699 | def eqrepr(value, repr=text_repr): return '=%s' % repr(value) |
|
700 | 700 | def nullrepr(value, repr=text_repr): return '' |
|
701 | 701 | |
|
702 | 702 | # meat of the code begins |
|
703 | 703 | try: |
|
704 | 704 | etype = etype.__name__ |
|
705 | 705 | except AttributeError: |
|
706 | 706 | pass |
|
707 | 707 | |
|
708 | 708 | if self.long_header: |
|
709 | 709 | # Header with the exception type, python version, and date |
|
710 | 710 | pyver = 'Python ' + string.split(sys.version)[0] + ': ' + sys.executable |
|
711 | 711 | date = time.ctime(time.time()) |
|
712 | 712 | |
|
713 | 713 | head = '%s%s%s\n%s%s%s\n%s' % (Colors.topline, '-'*75, ColorsNormal, |
|
714 | 714 | exc, ' '*(75-len(str(etype))-len(pyver)), |
|
715 | pyver, |

715 | pyver, date.rjust(75) ) |
|
716 | 716 | head += "\nA problem occurred executing Python code. Here is the sequence of function"\ |
|
717 | 717 | "\ncalls leading up to the error, with the most recent (innermost) call last." |
|
718 | 718 | else: |
|
719 | 719 | # Simplified header |
|
720 | 720 | head = '%s%s%s\n%s%s' % (Colors.topline, '-'*75, ColorsNormal,exc, |
|
721 |

722 | 75 - len(str(etype)) ) ) |
|
721 | 'Traceback (most recent call last)'.\ | |
|
722 | rjust(75 - len(str(etype)) ) ) | |
|
723 | 723 | frames = [] |
|
724 | 724 | # Flush cache before calling inspect. This helps alleviate some of the |
|
725 | 725 | # problems with python 2.3's inspect.py. |
|
726 | 726 | linecache.checkcache() |
|
727 | 727 | # Drop topmost frames if requested |
|
728 | 728 | try: |
|
729 | 729 | # Try the default getinnerframes and Alex's: Alex's fixes some |
|
730 | 730 | # problems, but it generates empty tracebacks for console errors |
|
731 | 731 | # (5 blank lines) where none should be returned. |
|
732 | 732 | #records = inspect.getinnerframes(etb, context)[tb_offset:] |
|
733 | 733 | #print 'python records:', records # dbg |
|
734 | 734 | records = _fixed_getinnerframes(etb, context, tb_offset) |
|
735 | 735 | #print 'alex records:', records # dbg |
|
736 | 736 | except: |
|
737 | 737 | |
|
738 | 738 | # FIXME: I've been getting many crash reports from python 2.3 |
|
739 | 739 | # users, traceable to inspect.py. If I can find a small test-case |
|
740 | 740 | # to reproduce this, I should either write a better workaround or |
|
741 | 741 | # file a bug report against inspect (if that's the real problem). |
|
742 | 742 | # So far, I haven't been able to find an isolated example to |
|
743 | 743 | # reproduce the problem. |
|
744 | 744 | inspect_error() |
|
745 | 745 | traceback.print_exc(file=self.ostream) |
|
746 | 746 | info('\nUnfortunately, your original traceback can not be constructed.\n') |
|
747 | 747 | return '' |
|
748 | 748 | |
|
749 | 749 | # build some color string templates outside these nested loops |
|
750 | 750 | tpl_link = '%s%%s%s' % (Colors.filenameEm,ColorsNormal) |
|
751 | 751 | tpl_call = 'in %s%%s%s%%s%s' % (Colors.vName, Colors.valEm, |
|
752 | 752 | ColorsNormal) |
|
753 | 753 | tpl_call_fail = 'in %s%%s%s(***failed resolving arguments***)%s' % \ |
|
754 | 754 | (Colors.vName, Colors.valEm, ColorsNormal) |
|
755 | 755 | tpl_local_var = '%s%%s%s' % (Colors.vName, ColorsNormal) |
|
756 | 756 | tpl_global_var = '%sglobal%s %s%%s%s' % (Colors.em, ColorsNormal, |
|
757 | 757 | Colors.vName, ColorsNormal) |
|
758 | 758 | tpl_name_val = '%%s %s= %%s%s' % (Colors.valEm, ColorsNormal) |
|
759 | 759 | tpl_line = '%s%%s%s %%s' % (Colors.lineno, ColorsNormal) |
|
760 | 760 | tpl_line_em = '%s%%s%s %%s%s' % (Colors.linenoEm,Colors.line, |
|
761 | 761 | ColorsNormal) |
|
762 | 762 | |
|
763 | 763 | # now, loop over all records printing context and info |
|
764 | 764 | abspath = os.path.abspath |
|
765 | 765 | for frame, file, lnum, func, lines, index in records: |
|
766 | 766 | #print '*** record:',file,lnum,func,lines,index # dbg |
|
767 | 767 | try: |
|
768 | 768 | file = file and abspath(file) or '?' |
|
769 | 769 | except OSError: |
|
770 | 770 | # if file is '<console>' or something not in the filesystem, |
|
771 | 771 | # the abspath call will throw an OSError. Just ignore it and |
|
772 | 772 | # keep the original file string. |
|
773 | 773 | pass |
|
774 | 774 | link = tpl_link % file |
|
775 | 775 | try: |
|
776 | 776 | args, varargs, varkw, locals = inspect.getargvalues(frame) |
|
777 | 777 | except: |
|
778 | 778 | # This can happen due to a bug in python2.3. We should be |
|
779 | 779 | # able to remove this try/except when 2.4 becomes a |
|
780 | 780 | # requirement. Bug details at http://python.org/sf/1005466 |
|
781 | 781 | inspect_error() |
|
782 | 782 | traceback.print_exc(file=self.ostream) |
|
783 | 783 | info("\nIPython's exception reporting continues...\n") |
|
784 | 784 | |
|
785 | 785 | if func == '?': |
|
786 | 786 | call = '' |
|
787 | 787 | else: |
|
788 | 788 | # Decide whether to include variable details or not |
|
789 | 789 | var_repr = self.include_vars and eqrepr or nullrepr |
|
790 | 790 | try: |
|
791 | 791 | call = tpl_call % (func,inspect.formatargvalues(args, |
|
792 | 792 | varargs, varkw, |
|
793 | 793 | locals,formatvalue=var_repr)) |
|
794 | 794 | except KeyError: |
|
795 | 795 | # Very odd crash from inspect.formatargvalues(). The |
|
796 | 796 | # scenario under which it appeared was a call to |
|
797 | 797 | # view(array,scale) in NumTut.view.view(), where scale had |
|
798 | 798 | # been defined as a scalar (it should be a tuple). Somehow |
|
799 | 799 | # inspect messes up resolving the argument list of view() |
|
800 | 800 | # and barfs out. At some point I should dig into this one |
|
801 | 801 | # and file a bug report about it. |
|
802 | 802 | inspect_error() |
|
803 | 803 | traceback.print_exc(file=self.ostream) |
|
804 | 804 | info("\nIPython's exception reporting continues...\n") |
|
805 | 805 | call = tpl_call_fail % func |
|
806 | 806 | |
|
807 | 807 | # Initialize a list of names on the current line, which the |
|
808 | 808 | # tokenizer below will populate. |
|
809 | 809 | names = [] |
|
810 | 810 | |
|
811 | 811 | def tokeneater(token_type, token, start, end, line): |
|
812 | 812 | """Stateful tokeneater which builds dotted names. |
|
813 | 813 | |
|
814 | 814 | The list of names it appends to (from the enclosing scope) can |
|
815 | 815 | contain repeated composite names. This is unavoidable, since |
|
816 | 816 | there is no way to disambiguate partial dotted structures until |
|
817 | 817 | the full list is known. The caller is responsible for pruning |
|
818 | 818 | the final list of duplicates before using it.""" |
|
819 | 819 | |
|
820 | 820 | # build composite names |
|
821 | 821 | if token == '.': |
|
822 | 822 | try: |
|
823 | 823 | names[-1] += '.' |
|
824 | 824 | # store state so the next token is added for x.y.z names |
|
825 | 825 | tokeneater.name_cont = True |
|
826 | 826 | return |
|
827 | 827 | except IndexError: |
|
828 | 828 | pass |
|
829 | 829 | if token_type == tokenize.NAME and token not in keyword.kwlist: |
|
830 | 830 | if tokeneater.name_cont: |
|
831 | 831 | # Dotted names |
|
832 | 832 | names[-1] += token |
|
833 | 833 | tokeneater.name_cont = False |
|
834 | 834 | else: |
|
835 | 835 | # Regular new names. We append everything, the caller |
|
836 | 836 | # will be responsible for pruning the list later. It's |
|
837 | 837 | # very tricky to try to prune as we go, b/c composite |
|
838 | 838 | # names can fool us. The pruning at the end is easy |
|
839 | 839 | # to do (or the caller can print a list with repeated |
|
840 | 840 | # names if so desired). |
|
841 | 841 | names.append(token) |
|
842 | 842 | elif token_type == tokenize.NEWLINE: |
|
843 | 843 | raise IndexError |
|
844 | 844 | # we need to store a bit of state in the tokenizer to build |
|
845 | 845 | # dotted names |
|
846 | 846 | tokeneater.name_cont = False |
|
847 | 847 | |
|
848 | 848 | def linereader(file=file, lnum=[lnum], getline=linecache.getline): |
|
849 | 849 | line = getline(file, lnum[0]) |
|
850 | 850 | lnum[0] += 1 |
|
851 | 851 | return line |
|
852 | 852 | |
|
853 | 853 | # Build the list of names on this line of code where the exception |
|
854 | 854 | # occurred. |
|
855 | 855 | try: |
|
856 | 856 | # This builds the names list in-place by capturing it from the |
|
857 | 857 | # enclosing scope. |
|
858 | 858 | tokenize.tokenize(linereader, tokeneater) |
|
859 | 859 | except IndexError: |
|
860 | 860 | # signals exit of tokenizer |
|
861 | 861 | pass |
|
862 | 862 | except tokenize.TokenError,msg: |
|
863 | 863 | _m = ("An unexpected error occurred while tokenizing input\n" |
|
864 | 864 | "The following traceback may be corrupted or invalid\n" |
|
865 | 865 | "The error message is: %s\n" % msg) |
|
866 | 866 | error(_m) |
|
867 | 867 | |
|
868 | 868 | # prune names list of duplicates, but keep the right order |
|
869 | 869 | unique_names = uniq_stable(names) |
|
870 | 870 | |
|
871 | 871 | # Start loop over vars |
|
872 | 872 | lvals = [] |
|
873 | 873 | if self.include_vars: |
|
874 | 874 | for name_full in unique_names: |
|
875 | 875 | name_base = name_full.split('.',1)[0] |
|
876 | 876 | if name_base in frame.f_code.co_varnames: |
|
877 | 877 | if locals.has_key(name_base): |
|
878 | 878 | try: |
|
879 | 879 | value = repr(eval(name_full,locals)) |
|
880 | 880 | except: |
|
881 | 881 | value = undefined |
|
882 | 882 | else: |
|
883 | 883 | value = undefined |
|
884 | 884 | name = tpl_local_var % name_full |
|
885 | 885 | else: |
|
886 | 886 | if frame.f_globals.has_key(name_base): |
|
887 | 887 | try: |
|
888 | 888 | value = repr(eval(name_full,frame.f_globals)) |
|
889 | 889 | except: |
|
890 | 890 | value = undefined |
|
891 | 891 | else: |
|
892 | 892 | value = undefined |
|
893 | 893 | name = tpl_global_var % name_full |
|
894 | 894 | lvals.append(tpl_name_val % (name,value)) |
|
895 | 895 | if lvals: |
|
896 | 896 | lvals = '%s%s' % (indent,em_normal.join(lvals)) |
|
897 | 897 | else: |
|
898 | 898 | lvals = '' |
|
899 | 899 | |
|
900 | 900 | level = '%s %s\n' % (link,call) |
|
901 | 901 | |
|
902 | 902 | if index is None: |
|
903 | 903 | frames.append(level) |
|
904 | 904 | else: |
|
905 | 905 | frames.append('%s%s' % (level,''.join( |
|
906 | 906 | _format_traceback_lines(lnum,index,lines,Colors,lvals, |
|
907 | 907 | col_scheme)))) |
|
908 | 908 | |
|
909 | 909 | # Get (safely) a string form of the exception info |
|
910 | 910 | try: |
|
911 | 911 | etype_str,evalue_str = map(str,(etype,evalue)) |
|
912 | 912 | except: |
|
913 | 913 | # User exception is improperly defined. |
|
914 | 914 | etype,evalue = str,sys.exc_info()[:2] |
|
915 | 915 | etype_str,evalue_str = map(str,(etype,evalue)) |
|
916 | 916 | # ... and format it |
|
917 | 917 | exception = ['%s%s%s: %s' % (Colors.excName, etype_str, |
|
918 | 918 | ColorsNormal, evalue_str)] |
|
919 | 919 | if type(evalue) is types.InstanceType: |
|
920 | 920 | try: |
|
921 | 921 | names = [w for w in dir(evalue) if isinstance(w, basestring)] |
|
922 | 922 | except: |
|
923 | 923 | # Every now and then, an object with funny internals blows up |
|
924 | 924 | # when dir() is called on it. We do the best we can to report |
|
925 | 925 | # the problem and continue |
|
926 | 926 | _m = '%sException reporting error (object with broken dir())%s:' |
|
927 | 927 | exception.append(_m % (Colors.excName,ColorsNormal)) |
|
928 | 928 | etype_str,evalue_str = map(str,sys.exc_info()[:2]) |
|
929 | 929 | exception.append('%s%s%s: %s' % (Colors.excName,etype_str, |
|
930 | 930 | ColorsNormal, evalue_str)) |
|
931 | 931 | names = [] |
|
932 | 932 | for name in names: |
|
933 | 933 | value = text_repr(getattr(evalue, name)) |
|
934 | 934 | exception.append('\n%s%s = %s' % (indent, name, value)) |
|
935 | 935 | |
|
936 | 936 | # vds: >> |
|
937 | 937 | if records: |
|
938 | 938 | filepath, lnum = records[-1][1:3] |
|
939 | 939 | #print "file:", str(file), "linenb", str(lnum) # dbg |
|
940 | 940 | filepath = os.path.abspath(filepath) |
|
941 | 941 | ipinst = ipapi.get() |
|
942 | 942 | if ipinst is not None: |
|
943 | 943 | ipinst.hooks.synchronize_with_editor(filepath, lnum, 0) |
|
944 | 944 | # vds: << |
|
945 | 945 | |
|
946 | 946 | # return all our info assembled as a single string |
|
947 | 947 | # return '%s\n\n%s\n%s' % (head,'\n'.join(frames),''.join(exception[0]) ) |
|
948 | 948 | return [head] + frames + [''.join(exception[0])] |
|
949 | 949 | |
|
950 | 950 | def debugger(self,force=False): |
|
951 | 951 | """Call up the pdb debugger if desired, always clean up the tb |
|
952 | 952 | reference. |
|
953 | 953 | |
|
954 | 954 | Keywords: |
|
955 | 955 | |
|
956 | 956 | - force(False): by default, this routine checks the instance call_pdb |
|
957 | 957 | flag and does not actually invoke the debugger if the flag is false. |
|
958 | 958 | The 'force' option forces the debugger to activate even if the flag |
|
959 | 959 | is false. |
|
960 | 960 | |
|
961 | 961 | If the call_pdb flag is set, the pdb interactive debugger is |
|
962 | 962 | invoked. In all cases, the self.tb reference to the current traceback |
|
963 | 963 | is deleted to prevent lingering references which hamper memory |
|
964 | 964 | management. |
|
965 | 965 | |
|
966 | 966 | Note that each call to pdb() does an 'import readline', so if your app |
|
967 | 967 | requires a special setup for the readline completers, you'll have to |
|
968 | 968 | fix that by hand after invoking the exception handler.""" |
|
969 | 969 | |
|
970 | 970 | if force or self.call_pdb: |
|
971 | 971 | if self.pdb is None: |
|
972 | 972 | self.pdb = debugger.Pdb( |
|
973 | 973 | self.color_scheme_table.active_scheme_name) |
|
974 | 974 | # the system displayhook may have changed, restore the original |
|
975 | 975 | # for pdb |
|
976 | 976 | display_trap = DisplayTrap(hook=sys.__displayhook__) |
|
977 | 977 | with display_trap: |
|
978 | 978 | self.pdb.reset() |
|
979 | 979 | # Find the right frame so we don't pop up inside ipython itself |
|
980 | 980 | if hasattr(self,'tb') and self.tb is not None: |
|
981 | 981 | etb = self.tb |
|
982 | 982 | else: |
|
983 | 983 | etb = self.tb = sys.last_traceback |
|
984 | 984 | while self.tb is not None and self.tb.tb_next is not None: |
|
985 | 985 | self.tb = self.tb.tb_next |
|
986 | 986 | if etb and etb.tb_next: |
|
987 | 987 | etb = etb.tb_next |
|
988 | 988 | self.pdb.botframe = etb.tb_frame |
|
989 | 989 | self.pdb.interaction(self.tb.tb_frame, self.tb) |
|
990 | 990 | |
|
991 | 991 | if hasattr(self,'tb'): |
|
992 | 992 | del self.tb |
|
993 | 993 | |
|
994 | 994 | def handler(self, info=None): |
|
995 | 995 | (etype, evalue, etb) = info or sys.exc_info() |
|
996 | 996 | self.tb = etb |
|
997 | 997 | ostream = self.ostream |
|
998 | 998 | ostream.flush() |
|
999 | 999 | ostream.write(self.text(etype, evalue, etb)) |
|
1000 | 1000 | ostream.write('\n') |
|
1001 | 1001 | ostream.flush() |
|
1002 | 1002 | |
|
1003 | 1003 | # Changed so an instance can just be called as VerboseTB_inst() and print |
|
1004 | 1004 | # out the right info on its own. |
|
1005 | 1005 | def __call__(self, etype=None, evalue=None, etb=None): |
|
1006 | 1006 | """This hook can replace sys.excepthook (for Python 2.1 or higher).""" |
|
1007 | 1007 | if etb is None: |
|
1008 | 1008 | self.handler() |
|
1009 | 1009 | else: |
|
1010 | 1010 | self.handler((etype, evalue, etb)) |
|
1011 | 1011 | try: |
|
1012 | 1012 | self.debugger() |
|
1013 | 1013 | except KeyboardInterrupt: |
|
1014 | 1014 | print "\nKeyboardInterrupt" |
|
1015 | 1015 | |
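As the __call__ docstring notes, an instance can stand in for sys.excepthook, since the hook is invoked with the same (etype, evalue, etb) triple. A one-line sketch (assumed import path):

    import sys
    from IPython.core.ultratb import VerboseTB   # assumed module path

    sys.excepthook = VerboseTB(color_scheme='NoColor', call_pdb=False)
    # Any uncaught exception from here on is rendered by VerboseTB instead of
    # the interpreter's default one-line-per-frame report.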
|
1016 | 1016 | #---------------------------------------------------------------------------- |
|
1017 | 1017 | class FormattedTB(VerboseTB, ListTB): |
|
1018 | 1018 | """Subclass ListTB but allow calling with a traceback. |
|
1019 | 1019 | |
|
1020 | 1020 | It can thus be used as a sys.excepthook for Python > 2.1. |
|
1021 | 1021 | |
|
1022 | 1022 | Also adds 'Context' and 'Verbose' modes, not available in ListTB. |
|
1023 | 1023 | |
|
1024 | 1024 | Allows a tb_offset to be specified. This is useful for situations where |
|
1025 | 1025 | one needs to remove a number of topmost frames from the traceback (such as |
|
1026 | 1026 | occurs with python programs that themselves execute other python code, |
|
1027 | 1027 | like Python shells). """ |
|
1028 | 1028 | |
|
1029 | 1029 | def __init__(self, mode='Plain', color_scheme='Linux', call_pdb=False, |
|
1030 | 1030 | ostream=None, |
|
1031 | 1031 | tb_offset=0, long_header=False, include_vars=False): |
|
1032 | 1032 | |
|
1033 | 1033 | # NEVER change the order of this list. Put new modes at the end: |
|
1034 | 1034 | self.valid_modes = ['Plain','Context','Verbose'] |
|
1035 | 1035 | self.verbose_modes = self.valid_modes[1:3] |
|
1036 | 1036 | |
|
1037 | 1037 | VerboseTB.__init__(self, color_scheme=color_scheme, call_pdb=call_pdb, |
|
1038 | 1038 | ostream=ostream, tb_offset=tb_offset, |
|
1039 | 1039 | long_header=long_header, include_vars=include_vars) |
|
1040 | 1040 | |
|
1041 | 1041 | # Different types of tracebacks are joined with different separators to |
|
1042 | 1042 | # form a single string. They are taken from this dict |
|
1043 | 1043 | self._join_chars = dict(Plain='', Context='\n', Verbose='\n') |
|
1044 | 1044 | # set_mode also sets the tb_join_char attribute |
|
1045 | 1045 | self.set_mode(mode) |
|
1046 | 1046 | |
|
1047 | 1047 | def _extract_tb(self,tb): |
|
1048 | 1048 | if tb: |
|
1049 | 1049 | return traceback.extract_tb(tb) |
|
1050 | 1050 | else: |
|
1051 | 1051 | return None |
|
1052 | 1052 | |
|
1053 | 1053 | def structured_traceback(self, etype, value, tb, tb_offset=None, context=5): |
|
1054 | 1054 | tb_offset = self.tb_offset if tb_offset is None else tb_offset |
|
1055 | 1055 | mode = self.mode |
|
1056 | 1056 | if mode in self.verbose_modes: |
|
1057 | 1057 | # Verbose modes need a full traceback |
|
1058 | 1058 | return VerboseTB.structured_traceback( |
|
1059 | 1059 | self, etype, value, tb, tb_offset, context |
|
1060 | 1060 | ) |
|
1061 | 1061 | else: |
|
1062 | 1062 | # We must check the source cache because otherwise we can print |
|
1063 | 1063 | # out-of-date source code. |
|
1064 | 1064 | linecache.checkcache() |
|
1065 | 1065 | # Now we can extract and format the exception |
|
1066 | 1066 | elist = self._extract_tb(tb) |
|
1067 | 1067 | return ListTB.structured_traceback( |
|
1068 | 1068 | self, etype, value, elist, tb_offset, context |
|
1069 | 1069 | ) |
|
1070 | 1070 | |
|
1071 | 1071 | def stb2text(self, stb): |
|
1072 | 1072 | """Convert a structured traceback (a list) to a string.""" |
|
1073 | 1073 | return self.tb_join_char.join(stb) |
|
1074 | 1074 | |
|
1075 | 1075 | |
|
1076 | 1076 | def set_mode(self,mode=None): |
|
1077 | 1077 | """Switch to the desired mode. |
|
1078 | 1078 | |
|
1079 | 1079 | If mode is not specified, cycles through the available modes.""" |
|
1080 | 1080 | |
|
1081 | 1081 | if not mode: |
|
1082 | 1082 | new_idx = ( self.valid_modes.index(self.mode) + 1 ) % \ |
|
1083 | 1083 | len(self.valid_modes) |
|
1084 | 1084 | self.mode = self.valid_modes[new_idx] |
|
1085 | 1085 | elif mode not in self.valid_modes: |
|
1086 | 1086 | raise ValueError, 'Unrecognized mode in FormattedTB: <'+mode+'>\n'\ |
|
1087 | 1087 | 'Valid modes: '+str(self.valid_modes) |
|
1088 | 1088 | else: |
|
1089 | 1089 | self.mode = mode |
|
1090 | 1090 | # include variable details only in 'Verbose' mode |
|
1091 | 1091 | self.include_vars = (self.mode == self.valid_modes[2]) |
|
1092 | 1092 | # Set the join character for generating text tracebacks |
|
1093 | 1093 | self.tb_join_char = self._join_chars[self.mode] |
|
1094 | 1094 | |
|
1095 | 1095 | # some convenient shortcuts |
|
1096 | 1096 | def plain(self): |
|
1097 | 1097 | self.set_mode(self.valid_modes[0]) |
|
1098 | 1098 | |
|
1099 | 1099 | def context(self): |
|
1100 | 1100 | self.set_mode(self.valid_modes[1]) |
|
1101 | 1101 | |
|
1102 | 1102 | def verbose(self): |
|
1103 | 1103 | self.set_mode(self.valid_modes[2]) |
|
1104 | 1104 | |
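A sketch of switching between the three modes via the shortcuts above (assumed import path); note that only 'Verbose' turns include_vars back on:

    from IPython.core.ultratb import FormattedTB   # assumed module path

    ftb = FormattedTB(mode='Plain', color_scheme='NoColor')
    ftb.verbose()                        # same as ftb.set_mode('Verbose')
    print ftb.mode, ftb.include_vars     # -> Verbose True
    ftb.plain()                          # back to the short ListTB-style report
    print ftb.mode, ftb.include_vars     # -> Plain False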
|
1105 | 1105 | #---------------------------------------------------------------------------- |
|
1106 | 1106 | class AutoFormattedTB(FormattedTB): |
|
1107 | 1107 | """A traceback printer which can be called on the fly. |
|
1108 | 1108 | |
|
1109 | 1109 | It will find out about exceptions by itself. |
|
1110 | 1110 | |
|
1111 | 1111 | A brief example: |
|
1112 | 1112 | |
|
1113 | 1113 | AutoTB = AutoFormattedTB(mode = 'Verbose',color_scheme='Linux') |
|
1114 | 1114 | try: |
|
1115 | 1115 | ... |
|
1116 | 1116 | except: |
|
1117 | 1117 | AutoTB() # or AutoTB(out=logfile) where logfile is an open file object |
|
1118 | 1118 | """ |
|
1119 | 1119 | |
|
1120 | 1120 | def __call__(self,etype=None,evalue=None,etb=None, |
|
1121 | 1121 | out=None,tb_offset=None): |
|
1122 | 1122 | """Print out a formatted exception traceback. |
|
1123 | 1123 | |
|
1124 | 1124 | Optional arguments: |
|
1125 | 1125 | - out: an open file-like object to direct output to. |
|
1126 | 1126 | |
|
1127 | 1127 | - tb_offset: the number of frames to skip over in the stack, on a |
|
1128 | 1128 | per-call basis (this temporarily overrides the instance's tb_offset |

1129 | 1129 | given at initialization time). """ |
|
1130 | 1130 | |
|
1131 | 1131 | |
|
1132 | 1132 | if out is None: |
|
1133 | 1133 | out = self.ostream |
|
1134 | 1134 | out.flush() |
|
1135 | 1135 | out.write(self.text(etype, evalue, etb, tb_offset)) |
|
1136 | 1136 | out.write('\n') |
|
1137 | 1137 | out.flush() |
|
1138 | 1138 | # FIXME: we should remove the auto pdb behavior from here and leave |
|
1139 | 1139 | # that to the clients. |
|
1140 | 1140 | try: |
|
1141 | 1141 | self.debugger() |
|
1142 | 1142 | except KeyboardInterrupt: |
|
1143 | 1143 | print "\nKeyboardInterrupt" |
|
1144 | 1144 | |
|
1145 | 1145 | def structured_traceback(self, etype=None, value=None, tb=None, |
|
1146 | 1146 | tb_offset=None, context=5): |
|
1147 | 1147 | if etype is None: |
|
1148 | 1148 | etype,value,tb = sys.exc_info() |
|
1149 | 1149 | self.tb = tb |
|
1150 | 1150 | return FormattedTB.structured_traceback( |
|
1151 | 1151 | self, etype, value, tb, tb_offset, context) |
|
1152 | 1152 | |
|
1153 | 1153 | #--------------------------------------------------------------------------- |
|
1154 | 1154 | |
|
1155 | 1155 | # A simple class to preserve Nathan's original functionality. |
|
1156 | 1156 | class ColorTB(FormattedTB): |
|
1157 | 1157 | """Shorthand to initialize a FormattedTB in Linux colors mode.""" |
|
1158 | 1158 | def __init__(self,color_scheme='Linux',call_pdb=0): |
|
1159 | 1159 | FormattedTB.__init__(self,color_scheme=color_scheme, |
|
1160 | 1160 | call_pdb=call_pdb) |
|
1161 | 1161 | |
|
1162 | 1162 | |
|
1163 | 1163 | class SyntaxTB(ListTB): |
|
1164 | 1164 | """Extension which holds some state: the last exception value""" |
|
1165 | 1165 | |
|
1166 | 1166 | def __init__(self,color_scheme = 'NoColor'): |
|
1167 | 1167 | ListTB.__init__(self,color_scheme) |
|
1168 | 1168 | self.last_syntax_error = None |
|
1169 | 1169 | |
|
1170 | 1170 | def __call__(self, etype, value, elist): |
|
1171 | 1171 | self.last_syntax_error = value |
|
1172 | 1172 | ListTB.__call__(self,etype,value,elist) |
|
1173 | 1173 | |
|
1174 | 1174 | def clear_err_state(self): |
|
1175 | 1175 | """Return the current error state and clear it""" |
|
1176 | 1176 | e = self.last_syntax_error |
|
1177 | 1177 | self.last_syntax_error = None |
|
1178 | 1178 | return e |
|
1179 | 1179 | |
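A sketch of the stateful behaviour: the instance remembers the last syntax error it printed until clear_err_state() is called (assumed import path; the empty list stands in for the frame list, as with ListTB):

    import sys
    from IPython.core.ultratb import SyntaxTB   # assumed module path

    stb = SyntaxTB(color_scheme='NoColor')
    try:
        compile("def broken(:", "<example>", "exec")
    except SyntaxError:
        etype, evalue = sys.exc_info()[:2]
        stb(etype, evalue, [])        # prints the error and stores evalue
    print stb.clear_err_state()       # -> the stored SyntaxError, now cleared
    print stb.clear_err_state()       # -> None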
|
1180 | 1180 | def stb2text(self, stb): |
|
1181 | 1181 | """Convert a structured traceback (a list) to a string.""" |
|
1182 | 1182 | return ''.join(stb) |
|
1183 | 1183 | |
|
1184 | 1184 | |
|
1185 | 1185 | #---------------------------------------------------------------------------- |
|
1186 | 1186 | # module testing (minimal) |
|
1187 | 1187 | if __name__ == "__main__": |
|
1188 | 1188 | def spam(c, (d, e)): |
|
1189 | 1189 | x = c + d |
|
1190 | 1190 | y = c * d |
|
1191 | 1191 | foo(x, y) |
|
1192 | 1192 | |
|
1193 | 1193 | def foo(a, b, bar=1): |
|
1194 | 1194 | eggs(a, b + bar) |
|
1195 | 1195 | |
|
1196 | 1196 | def eggs(f, g, z=globals()): |
|
1197 | 1197 | h = f + g |
|
1198 | 1198 | i = f - g |
|
1199 | 1199 | return h / i |
|
1200 | 1200 | |
|
1201 | 1201 | print '' |
|
1202 | 1202 | print '*** Before ***' |
|
1203 | 1203 | try: |
|
1204 | 1204 | print spam(1, (2, 3)) |
|
1205 | 1205 | except: |
|
1206 | 1206 | traceback.print_exc() |
|
1207 | 1207 | print '' |
|
1208 | 1208 | |
|
1209 | 1209 | handler = ColorTB() |
|
1210 | 1210 | print '*** ColorTB ***' |
|
1211 | 1211 | try: |
|
1212 | 1212 | print spam(1, (2, 3)) |
|
1213 | 1213 | except: |
|
1214 | 1214 | apply(handler, sys.exc_info() ) |
|
1215 | 1215 | print '' |
|
1216 | 1216 | |
|
1217 | 1217 | handler = VerboseTB() |
|
1218 | 1218 | print '*** VerboseTB ***' |
|
1219 | 1219 | try: |
|
1220 | 1220 | print spam(1, (2, 3)) |
|
1221 | 1221 | except: |
|
1222 | 1222 | apply(handler, sys.exc_info() ) |
|
1223 | 1223 | print '' |
|
1224 | 1224 |
@@ -1,276 +1,275 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """String interpolation for Python (by Ka-Ping Yee, 14 Feb 2000). |
|
3 | 3 | |
|
4 | 4 | This module lets you quickly and conveniently interpolate values into |
|
5 | 5 | strings (in the flavour of Perl or Tcl, but with less extraneous |
|
6 | 6 | punctuation). You get a bit more power than in the other languages, |
|
7 | 7 | because this module allows subscripting, slicing, function calls, |
|
8 | 8 | attribute lookup, or arbitrary expressions. Variables and expressions |
|
9 | 9 | are evaluated in the namespace of the caller. |
|
10 | 10 | |
|
11 | 11 | The itpl() function returns the result of interpolating a string, and |
|
12 | 12 | printpl() prints out an interpolated string. Here are some examples: |
|
13 | 13 | |
|
14 | 14 | from Itpl import printpl |
|
15 | 15 | printpl("Here is a $string.") |
|
16 | 16 | printpl("Here is a $module.member.") |
|
17 | 17 | printpl("Here is an $object.member.") |
|
18 | 18 | printpl("Here is a $functioncall(with, arguments).") |
|
19 | 19 | printpl("Here is an ${arbitrary + expression}.") |
|
20 | 20 | printpl("Here is an $array[3] member.") |
|
21 | 21 | printpl("Here is a $dictionary['member'].") |
|
22 | 22 | |
|
23 | 23 | The filter() function filters a file object so that output through it |
|
24 | 24 | is interpolated. This lets you produce the illusion that Python knows |
|
25 | 25 | how to do interpolation: |
|
26 | 26 | |
|
27 | 27 | import Itpl |
|
28 | 28 | sys.stdout = Itpl.filter() |
|
29 | 29 | f = "fancy" |
|
30 | 30 | print "Is this not $f?" |
|
31 | 31 | print "Standard output has been replaced with a $sys.stdout object." |
|
32 | 32 | sys.stdout = Itpl.unfilter() |
|
33 | 33 | print "Okay, back $to $normal." |
|
34 | 34 | |
|
35 | 35 | Under the hood, the Itpl class represents a string that knows how to |
|
36 | 36 | interpolate values. An instance of the class parses the string once |
|
37 | 37 | upon initialization; the evaluation and substitution can then be done |
|
38 | 38 | each time the instance is evaluated with str(instance). For example: |
|
39 | 39 | |
|
40 | 40 | from Itpl import Itpl |
|
41 | 41 | s = Itpl("Here is $foo.") |
|
42 | 42 | foo = 5 |
|
43 | 43 | print str(s) |
|
44 | 44 | foo = "bar" |
|
45 | 45 | print str(s) |
|
46 | 46 | """ |
|
47 | 47 | |
|
48 | 48 | #***************************************************************************** |
|
49 | 49 | # |
|
50 | 50 | # Copyright (c) 2001 Ka-Ping Yee <ping@lfw.org> |
|
51 | 51 | # |
|
52 | 52 | # |
|
53 | 53 | # Published under the terms of the MIT license, hereby reproduced: |
|
54 | 54 | # |
|
55 | 55 | # Permission is hereby granted, free of charge, to any person obtaining a copy |
|
56 | 56 | # of this software and associated documentation files (the "Software"), to |
|
57 | 57 | # deal in the Software without restriction, including without limitation the |
|
58 | 58 | # rights to use, copy, modify, merge, publish, distribute, sublicense, and/or |
|
59 | 59 | # sell copies of the Software, and to permit persons to whom the Software is |
|
60 | 60 | # furnished to do so, subject to the following conditions: |
|
61 | 61 | # |
|
62 | 62 | # The above copyright notice and this permission notice shall be included in |
|
63 | 63 | # all copies or substantial portions of the Software. |
|
64 | 64 | # |
|
65 | 65 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR |
|
66 | 66 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, |
|
67 | 67 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE |
|
68 | 68 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER |
|
69 | 69 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING |
|
70 | 70 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS |
|
71 | 71 | # IN THE SOFTWARE. |
|
72 | 72 | # |
|
73 | 73 | #***************************************************************************** |
|
74 | 74 | |
|
75 | 75 | __author__ = 'Ka-Ping Yee <ping@lfw.org>' |
|
76 | 76 | __license__ = 'MIT' |
|
77 | 77 | |
|
78 | 78 | import string |
|
79 | 79 | import sys |
|
80 | 80 | from tokenize import tokenprog |
|
81 | from types import StringType | |
|
82 | 81 | |
|
83 | 82 | class ItplError(ValueError): |
|
84 | 83 | def __init__(self, text, pos): |
|
85 | 84 | self.text = text |
|
86 | 85 | self.pos = pos |
|
87 | 86 | def __str__(self): |
|
88 | 87 | return "unfinished expression in %s at char %d" % ( |
|
89 | 88 | repr(self.text), self.pos) |
|
90 | 89 | |
|
91 | 90 | def matchorfail(text, pos): |
|
92 | 91 | match = tokenprog.match(text, pos) |
|
93 | 92 | if match is None: |
|
94 | 93 | raise ItplError(text, pos) |
|
95 | 94 | return match, match.end() |
|
96 | 95 | |
|
97 | 96 | class Itpl: |
|
98 | 97 | """Class representing a string with interpolation abilities. |
|
99 | 98 | |
|
100 | 99 | Upon creation, an instance works out what parts of the format |
|
101 | 100 | string are literal and what parts need to be evaluated. The |
|
102 | 101 | evaluation and substitution happens in the namespace of the |
|
103 | 102 | caller when str(instance) is called.""" |
|
104 | 103 | |
|
105 | 104 | def __init__(self, format,codec='utf_8',encoding_errors='backslashreplace'): |
|
106 | 105 | """The single mandatory argument to this constructor is a format |
|
107 | 106 | string. |
|
108 | 107 | |
|
109 | 108 | The format string is parsed according to the following rules: |
|
110 | 109 | |
|
111 | 110 | 1. A dollar sign and a name, possibly followed by any of: |
|
112 | 111 | - an open-paren, and anything up to the matching paren |
|
113 | 112 | - an open-bracket, and anything up to the matching bracket |
|
114 | 113 | - a period and a name |
|
115 | 114 | any number of times, is evaluated as a Python expression. |
|
116 | 115 | |
|
117 | 116 | 2. A dollar sign immediately followed by an open-brace, and |
|
118 | 117 | anything up to the matching close-brace, is evaluated as |
|
119 | 118 | a Python expression. |
|
120 | 119 | |
|
121 | 120 | 3. Outside of the expressions described in the above two rules, |
|
122 | 121 | two dollar signs in a row give you one literal dollar sign. |
|
123 | 122 | |
|
124 | 123 | Optional arguments: |
|
125 | 124 | |
|
126 | 125 | - codec('utf_8'): a string containing the name of a valid Python |
|
127 | 126 | codec. |
|
128 | 127 | |
|
129 | 128 | - encoding_errors('backslashreplace'): a string with a valid error handling |
|
130 | 129 | policy. See the codecs module documentation for details. |
|
131 | 130 | |
|
132 | 131 | These are used to encode the format string if a call to str() fails on |
|
133 | 132 | the expanded result.""" |
|
134 | 133 | |
|
135 | 134 | if not isinstance(format,basestring): |
|
136 | 135 | raise TypeError, "needs string initializer" |
|
137 | 136 | self.format = format |
|
138 | 137 | self.codec = codec |
|
139 | 138 | self.encoding_errors = encoding_errors |
|
140 | 139 | |
|
141 | 140 | namechars = "abcdefghijklmnopqrstuvwxyz" \ |
|
142 | 141 | "ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_"; |
|
143 | 142 | chunks = [] |
|
144 | 143 | pos = 0 |
|
145 | 144 | |
|
146 | 145 | while 1: |
|
147 |
dollar = |
|
|
146 | dollar = format.find("$", pos) | |
|
148 | 147 | if dollar < 0: break |
|
149 | 148 | nextchar = format[dollar+1] |
|
150 | 149 | |
|
151 | 150 | if nextchar == "{": |
|
152 | 151 | chunks.append((0, format[pos:dollar])) |
|
153 | 152 | pos, level = dollar+2, 1 |
|
154 | 153 | while level: |
|
155 | 154 | match, pos = matchorfail(format, pos) |
|
156 | 155 | tstart, tend = match.regs[3] |
|
157 | 156 | token = format[tstart:tend] |
|
158 | 157 | if token == "{": level = level+1 |
|
159 | 158 | elif token == "}": level = level-1 |
|
160 | 159 | chunks.append((1, format[dollar+2:pos-1])) |
|
161 | 160 | |
|
162 | 161 | elif nextchar in namechars: |
|
163 | 162 | chunks.append((0, format[pos:dollar])) |
|
164 | 163 | match, pos = matchorfail(format, dollar+1) |
|
165 | 164 | while pos < len(format): |
|
166 | 165 | if format[pos] == "." and \ |
|
167 | 166 | pos+1 < len(format) and format[pos+1] in namechars: |
|
168 | 167 | match, pos = matchorfail(format, pos+1) |
|
169 | 168 | elif format[pos] in "([": |
|
170 | 169 | pos, level = pos+1, 1 |
|
171 | 170 | while level: |
|
172 | 171 | match, pos = matchorfail(format, pos) |
|
173 | 172 | tstart, tend = match.regs[3] |
|
174 | 173 | token = format[tstart:tend] |
|
175 | 174 | if token[0] in "([": level = level+1 |
|
176 | 175 | elif token[0] in ")]": level = level-1 |
|
177 | 176 | else: break |
|
178 | 177 | chunks.append((1, format[dollar+1:pos])) |
|
179 | 178 | |
|
180 | 179 | else: |
|
181 | 180 | chunks.append((0, format[pos:dollar+1])) |
|
182 | 181 | pos = dollar + 1 + (nextchar == "$") |
|
183 | 182 | |
|
184 | 183 | if pos < len(format): chunks.append((0, format[pos:])) |
|
185 | 184 | self.chunks = chunks |
|
186 | 185 | |
|
187 | 186 | def __repr__(self): |
|
188 | 187 | return "<Itpl %s >" % repr(self.format) |
|
189 | 188 | |
|
190 | 189 | def _str(self,glob,loc): |
|
191 | 190 | """Evaluate to a string in the given globals/locals. |
|
192 | 191 | |
|
193 | 192 | The final output is built by calling str(), but if this fails, the |
|
194 | 193 | result is encoded with the instance's codec and error handling policy, |
|
195 | 194 | via a call to out.encode(self.codec,self.encoding_errors)""" |
|
196 | 195 | result = [] |
|
197 | 196 | app = result.append |
|
198 | 197 | for live, chunk in self.chunks: |
|
199 | 198 | if live: app(str(eval(chunk,glob,loc))) |
|
200 | 199 | else: app(chunk) |
|
201 | 200 | out = ''.join(result) |
|
202 | 201 | try: |
|
203 | 202 | return str(out) |
|
204 | 203 | except UnicodeError: |
|
205 | 204 | return out.encode(self.codec,self.encoding_errors) |
|
206 | 205 | |
|
207 | 206 | def __str__(self): |
|
208 | 207 | """Evaluate and substitute the appropriate parts of the string.""" |
|
209 | 208 | |
|
210 | 209 | # We need to skip enough frames to get to the actual caller outside of |
|
211 | 210 | # Itpl. |
|
212 | 211 | frame = sys._getframe(1) |
|
213 | 212 | while frame.f_globals["__name__"] == __name__: frame = frame.f_back |
|
214 | 213 | loc, glob = frame.f_locals, frame.f_globals |
|
215 | 214 | |
|
216 | 215 | return self._str(glob,loc) |
|
217 | 216 | |
|
218 | 217 | class ItplNS(Itpl): |
|
219 | 218 | """Class representing a string with interpolation abilities. |
|
220 | 219 | |
|
221 | 220 | This inherits from Itpl, but at creation time a namespace is provided |
|
222 | 221 | where the evaluation will occur. The interpolation becomes a bit more |
|
223 | 222 | efficient, as no traceback needs to be extracte. It also allows the |
|
224 | 223 | caller to supply a different namespace for the interpolation to occur than |
|
225 | 224 | its own.""" |
|
226 | 225 | |
|
227 | 226 | def __init__(self, format,globals,locals=None, |
|
228 | 227 | codec='utf_8',encoding_errors='backslashreplace'): |
|
229 | 228 | """ItplNS(format,globals[,locals]) -> interpolating string instance. |
|
230 | 229 | |
|
231 | 230 | This constructor, besides a format string, takes a globals dictionary |
|
232 | 231 | and optionally a locals (which defaults to globals if not provided). |
|
233 | 232 | |
|
234 | 233 | For further details, see the Itpl constructor.""" |
|
235 | 234 | |
|
236 | 235 | if locals is None: |
|
237 | 236 | locals = globals |
|
238 | 237 | self.globals = globals |
|
239 | 238 | self.locals = locals |
|
240 | 239 | Itpl.__init__(self,format,codec,encoding_errors) |
|
241 | 240 | |
|
242 | 241 | def __str__(self): |
|
243 | 242 | """Evaluate and substitute the appropriate parts of the string.""" |
|
244 | 243 | return self._str(self.globals,self.locals) |
|
245 | 244 | |
|
246 | 245 | def __repr__(self): |
|
247 | 246 | return "<ItplNS %s >" % repr(self.format) |
|
248 | 247 | |
|
249 | 248 | # utilities for fast printing |
|
250 | 249 | def itpl(text): return str(Itpl(text)) |
|
251 | 250 | def printpl(text): print itpl(text) |
|
252 | 251 | # versions with namespace |
|
253 | 252 | def itplns(text,globals,locals=None): return str(ItplNS(text,globals,locals)) |
|
254 | 253 | def printplns(text,globals,locals=None): print itplns(text,globals,locals) |
|
255 | 254 | |
|
256 | 255 | class ItplFile: |
|
257 | 256 | """A file object that filters each write() through an interpolator.""" |
|
258 | 257 | def __init__(self, file): self.file = file |
|
259 | 258 | def __repr__(self): return "<interpolated " + repr(self.file) + ">" |
|
260 | 259 | def __getattr__(self, attr): return getattr(self.file, attr) |
|
261 | 260 | def write(self, text): self.file.write(str(Itpl(text))) |
|
262 | 261 | |
|
263 | 262 | def filter(file=sys.stdout): |
|
264 | 263 | """Return an ItplFile that filters writes to the given file object. |
|
265 | 264 | |
|
266 | 265 | 'file = filter(file)' replaces 'file' with a filtered object that |
|
267 | 266 | has a write() method. When called with no argument, this creates |
|
268 | 267 | a filter to sys.stdout.""" |
|
269 | 268 | return ItplFile(file) |
|
270 | 269 | |
|
271 | 270 | def unfilter(ifile=None): |
|
272 | 271 | """Return the original file that corresponds to the given ItplFile. |
|
273 | 272 | |
|
274 | 273 | 'file = unfilter(file)' undoes the effect of 'file = filter(file)'. |
|
275 | 274 | 'sys.stdout = unfilter()' undoes the effect of 'sys.stdout = filter()'.""" |
|
276 | 275 | return ifile and ifile.file or sys.stdout.file |
@@ -1,2501 +1,2472 b'' | |||
|
1 | # -*- coding: utf-8 -*- | |
|
1 | 2 | # configobj.py |
|
2 | 3 | # A config file reader/writer that supports nested sections in config files. |
|
3 | 4 | # Copyright (C) 2005-2008 Michael Foord, Nicola Larosa |
|
4 | 5 | # E-mail: fuzzyman AT voidspace DOT org DOT uk |
|
5 | 6 | # nico AT tekNico DOT net |
|
6 | 7 | |
|
7 | 8 | # ConfigObj 4 |
|
8 | 9 | # http://www.voidspace.org.uk/python/configobj.html |
|
9 | 10 | |
|
10 | 11 | # Released subject to the BSD License |
|
11 | 12 | # Please see http://www.voidspace.org.uk/python/license.shtml |
|
12 | 13 | |
|
13 | 14 | # Scripts maintained at http://www.voidspace.org.uk/python/index.shtml |
|
14 | 15 | # For information about bugfixes, updates and support, please join the |
|
15 | 16 | # ConfigObj mailing list: |
|
16 | 17 | # http://lists.sourceforge.net/lists/listinfo/configobj-develop |
|
17 | 18 | # Comments, suggestions and bug reports welcome. |
|
18 | 19 | |
|
19 | 20 | from __future__ import generators |
|
20 | 21 | |
|
21 | 22 | import sys |
|
22 | 23 | INTP_VER = sys.version_info[:2] |
|
23 | 24 | if INTP_VER < (2, 2): |
|
24 | 25 | raise RuntimeError("Python v.2.2 or later needed") |
|
25 | 26 | |
|
26 | 27 | import os, re |
|
27 | 28 | compiler = None |
|
28 | 29 | try: |
|
29 | 30 | import compiler |
|
30 | 31 | except ImportError: |
|
31 | 32 | # for IronPython |
|
32 | 33 | pass |
|
33 | 34 | from types import StringTypes |
|
34 | 35 | from warnings import warn |
|
35 | try: | |
|
36 | from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE | |
|
37 | except ImportError: | |
|
38 | # Python 2.2 does not have these | |
|
39 | # UTF-8 | |
|
40 | BOM_UTF8 = '\xef\xbb\xbf' | |
|
41 | # UTF-16, little endian | |
|
42 | BOM_UTF16_LE = '\xff\xfe' | |
|
43 | # UTF-16, big endian | |
|
44 | BOM_UTF16_BE = '\xfe\xff' | |
|
45 | if sys.byteorder == 'little': | |
|
46 | # UTF-16, native endianness | |
|
47 | BOM_UTF16 = BOM_UTF16_LE | |
|
48 | else: | |
|
49 | # UTF-16, native endianness | |
|
50 | BOM_UTF16 = BOM_UTF16_BE | |
|
36 | from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE | |
|
51 | 37 | |
|
52 | 38 | # A dictionary mapping BOM to |
|
53 | 39 | # the encoding to decode with, and what to set the |
|
54 | 40 | # encoding attribute to. |
|
55 | 41 | BOMS = { |
|
56 | 42 | BOM_UTF8: ('utf_8', None), |
|
57 | 43 | BOM_UTF16_BE: ('utf16_be', 'utf_16'), |
|
58 | 44 | BOM_UTF16_LE: ('utf16_le', 'utf_16'), |
|
59 | 45 | BOM_UTF16: ('utf_16', 'utf_16'), |
|
60 | 46 | } |
|
61 | 47 | # All legal variants of the BOM codecs. |
|
62 | 48 | # TODO: the list of aliases is not meant to be exhaustive, is there a |
|
63 | 49 | # better way ? |
|
64 | 50 | BOM_LIST = { |
|
65 | 51 | 'utf_16': 'utf_16', |
|
66 | 52 | 'u16': 'utf_16', |
|
67 | 53 | 'utf16': 'utf_16', |
|
68 | 54 | 'utf-16': 'utf_16', |
|
69 | 55 | 'utf16_be': 'utf16_be', |
|
70 | 56 | 'utf_16_be': 'utf16_be', |
|
71 | 57 | 'utf-16be': 'utf16_be', |
|
72 | 58 | 'utf16_le': 'utf16_le', |
|
73 | 59 | 'utf_16_le': 'utf16_le', |
|
74 | 60 | 'utf-16le': 'utf16_le', |
|
75 | 61 | 'utf_8': 'utf_8', |
|
76 | 62 | 'u8': 'utf_8', |
|
77 | 63 | 'utf': 'utf_8', |
|
78 | 64 | 'utf8': 'utf_8', |
|
79 | 65 | 'utf-8': 'utf_8', |
|
80 | 66 | } |
|
81 | 67 | |
|
82 | 68 | # Map of encodings to the BOM to write. |
|
83 | 69 | BOM_SET = { |
|
84 | 70 | 'utf_8': BOM_UTF8, |
|
85 | 71 | 'utf_16': BOM_UTF16, |
|
86 | 72 | 'utf16_be': BOM_UTF16_BE, |
|
87 | 73 | 'utf16_le': BOM_UTF16_LE, |
|
88 | 74 | None: BOM_UTF8 |
|
89 | 75 | } |
|
90 | 76 | |
|
91 | 77 | |
|
92 | 78 | def match_utf8(encoding): |
|
93 | 79 | return BOM_LIST.get(encoding.lower()) == 'utf_8' |
|
94 | 80 | |
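A hedged sketch of how the tables above can be used to sniff a byte string for a BOM before parsing; sniff_bom is our illustrative helper, not part of configobj's API, and it assumes BOMS and match_utf8 from this module are in scope:

    from codecs import BOM_UTF8

    def sniff_bom(data):
        """Return (bom_codec, encoding_attribute, data_without_bom)."""
        for bom, (bom_codec, final_encoding) in BOMS.items():
            if data.startswith(bom):
                return bom_codec, final_encoding, data[len(bom):]
        return None, None, data

    print sniff_bom(BOM_UTF8 + 'key = value\n')[0]    # -> utf_8
    print match_utf8('UTF-8'), match_utf8('latin-1')  # -> True False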
|
95 | 81 | |
|
96 | 82 | # Quote strings used for writing values |
|
97 | 83 | squot = "'%s'" |
|
98 | 84 | dquot = '"%s"' |
|
99 | 85 | noquot = "%s" |
|
100 | 86 | wspace_plus = ' \r\t\n\v\t\'"' |
|
101 | 87 | tsquot = '"""%s"""' |
|
102 | 88 | tdquot = "'''%s'''" |
|
103 | 89 | |
|
104 | try: | |
|
105 | enumerate | |
|
106 | except NameError: | |
|
107 | def enumerate(obj): | |
|
108 | """enumerate for Python 2.2.""" | |
|
109 | i = -1 | |
|
110 | for item in obj: | |
|
111 | i += 1 | |
|
112 | yield i, item | |
|
113 | ||
|
114 | try: | |
|
115 | True, False | |
|
116 | except NameError: | |
|
117 | True, False = 1, 0 | |
|
118 | ||
|
119 | 90 | |
|
120 | 91 | __version__ = '4.5.2' |
|
121 | 92 | |
|
122 | 93 | __revision__ = '$Id: configobj.py 156 2006-01-31 14:57:08Z fuzzyman $' |
|
123 | 94 | |
|
124 | 95 | __docformat__ = "restructuredtext en" |
|
125 | 96 | |
|
126 | 97 | __all__ = ( |
|
127 | 98 | '__version__', |
|
128 | 99 | 'DEFAULT_INDENT_TYPE', |
|
129 | 100 | 'DEFAULT_INTERPOLATION', |
|
130 | 101 | 'ConfigObjError', |
|
131 | 102 | 'NestingError', |
|
132 | 103 | 'ParseError', |
|
133 | 104 | 'DuplicateError', |
|
134 | 105 | 'ConfigspecError', |
|
135 | 106 | 'ConfigObj', |
|
136 | 107 | 'SimpleVal', |
|
137 | 108 | 'InterpolationError', |
|
138 | 109 | 'InterpolationLoopError', |
|
139 | 110 | 'MissingInterpolationOption', |
|
140 | 111 | 'RepeatSectionError', |
|
141 | 112 | 'ReloadError', |
|
142 | 113 | 'UnreprError', |
|
143 | 114 | 'UnknownType', |
|
144 | 115 | '__docformat__', |
|
145 | 116 | 'flatten_errors', |
|
146 | 117 | ) |
|
147 | 118 | |
|
148 | 119 | DEFAULT_INTERPOLATION = 'configparser' |
|
149 | 120 | DEFAULT_INDENT_TYPE = ' ' |
|
150 | 121 | MAX_INTERPOL_DEPTH = 10 |
|
151 | 122 | |
|
152 | 123 | OPTION_DEFAULTS = { |
|
153 | 124 | 'interpolation': True, |
|
154 | 125 | 'raise_errors': False, |
|
155 | 126 | 'list_values': True, |
|
156 | 127 | 'create_empty': False, |
|
157 | 128 | 'file_error': False, |
|
158 | 129 | 'configspec': None, |
|
159 | 130 | 'stringify': True, |
|
160 | 131 | # option may be set to one of ('', ' ', '\t') |
|
161 | 132 | 'indent_type': None, |
|
162 | 133 | 'encoding': None, |
|
163 | 134 | 'default_encoding': None, |
|
164 | 135 | 'unrepr': False, |
|
165 | 136 | 'write_empty_values': False, |
|
166 | 137 | } |
|
167 | 138 | |
|
168 | 139 | |
|
169 | 140 | |
|
170 | 141 | def getObj(s): |
|
171 | 142 | s = "a=" + s |
|
172 | 143 | if compiler is None: |
|
173 | 144 | raise ImportError('compiler module not available') |
|
174 | 145 | p = compiler.parse(s) |
|
175 | 146 | return p.getChildren()[1].getChildren()[0].getChildren()[1] |
|
176 | 147 | |
|
177 | 148 | |
|
178 | 149 | class UnknownType(Exception): |
|
179 | 150 | pass |
|
180 | 151 | |
|
181 | 152 | |
|
182 | 153 | class Builder(object): |
|
183 | 154 | |
|
184 | 155 | def build(self, o): |
|
185 | 156 | m = getattr(self, 'build_' + o.__class__.__name__, None) |
|
186 | 157 | if m is None: |
|
187 | 158 | raise UnknownType(o.__class__.__name__) |
|
188 | 159 | return m(o) |
|
189 | 160 | |
|
190 | 161 | def build_List(self, o): |
|
191 | 162 | return map(self.build, o.getChildren()) |
|
192 | 163 | |
|
193 | 164 | def build_Const(self, o): |
|
194 | 165 | return o.value |
|
195 | 166 | |
|
196 | 167 | def build_Dict(self, o): |
|
197 | 168 | d = {} |
|
198 | 169 | i = iter(map(self.build, o.getChildren())) |
|
199 | 170 | for el in i: |
|
200 | 171 | d[el] = i.next() |
|
201 | 172 | return d |
|
202 | 173 | |
|
203 | 174 | def build_Tuple(self, o): |
|
204 | 175 | return tuple(self.build_List(o)) |
|
205 | 176 | |
|
206 | 177 | def build_Name(self, o): |
|
207 | 178 | if o.name == 'None': |
|
208 | 179 | return None |
|
209 | 180 | if o.name == 'True': |
|
210 | 181 | return True |
|
211 | 182 | if o.name == 'False': |
|
212 | 183 | return False |
|
213 | 184 | |
|
214 | 185 | # An undefined Name |
|
215 | 186 | raise UnknownType('Undefined Name') |
|
216 | 187 | |
|
217 | 188 | def build_Add(self, o): |
|
218 | 189 | real, imag = map(self.build_Const, o.getChildren()) |
|
219 | 190 | try: |
|
220 | 191 | real = float(real) |
|
221 | 192 | except TypeError: |
|
222 | 193 | raise UnknownType('Add') |
|
223 | 194 | if not isinstance(imag, complex) or imag.real != 0.0: |
|
224 | 195 | raise UnknownType('Add') |
|
225 | 196 | return real+imag |
|
226 | 197 | |
|
227 | 198 | def build_Getattr(self, o): |
|
228 | 199 | parent = self.build(o.expr) |
|
229 | 200 | return getattr(parent, o.attrname) |
|
230 | 201 | |
|
231 | 202 | def build_UnarySub(self, o): |
|
232 | 203 | return -self.build_Const(o.getChildren()[0]) |
|
233 | 204 | |
|
234 | 205 | def build_UnaryAdd(self, o): |
|
235 | 206 | return self.build_Const(o.getChildren()[0]) |
|
236 | 207 | |
|
237 | 208 | |
|
238 | 209 | _builder = Builder() |
|
239 | 210 | |
|
240 | 211 | |
|
241 | 212 | def unrepr(s): |
|
242 | 213 | if not s: |
|
243 | 214 | return s |
|
244 | 215 | return _builder.build(getObj(s)) |
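
A minimal sketch of what ``unrepr`` yields for simple literals, assuming the definitions above are importable and that CPython's ``compiler`` module is present; the real work is done by ``compiler.parse`` plus the ``Builder`` visitor.

    # Hedged sketch of expected results for basic inputs.
    assert unrepr("3") == 3
    assert unrepr("'a string'") == 'a string'
    assert unrepr("[1, 2, 'x']") == [1, 2, 'x']
    assert unrepr("{'k': True}") == {'k': True}
    assert unrepr("") == ""        # empty input is returned unchanged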
|
245 | 216 | |
|
246 | 217 | |
|
247 | 218 | |
|
248 | 219 | class ConfigObjError(SyntaxError): |
|
249 | 220 | """ |
|
250 | 221 | This is the base class for all errors that ConfigObj raises. |
|
251 | 222 | It is a subclass of SyntaxError. |
|
252 | 223 | """ |
|
253 | 224 | def __init__(self, message='', line_number=None, line=''): |
|
254 | 225 | self.line = line |
|
255 | 226 | self.line_number = line_number |
|
256 | 227 | self.message = message |
|
257 | 228 | SyntaxError.__init__(self, message) |
|
258 | 229 | |
|
259 | 230 | |
|
260 | 231 | class NestingError(ConfigObjError): |
|
261 | 232 | """ |
|
262 | 233 | This error indicates a level of nesting that doesn't match. |
|
263 | 234 | """ |
|
264 | 235 | |
|
265 | 236 | |
|
266 | 237 | class ParseError(ConfigObjError): |
|
267 | 238 | """ |
|
268 | 239 | This error indicates that a line is badly written. |
|
269 | 240 | It is neither a valid ``key = value`` line, |
|
270 | 241 | nor a valid section marker line. |
|
271 | 242 | """ |
|
272 | 243 | |
|
273 | 244 | |
|
274 | 245 | class ReloadError(IOError): |
|
275 | 246 | """ |
|
276 | 247 | A 'reload' operation failed. |
|
277 | 248 | This exception is a subclass of ``IOError``. |
|
278 | 249 | """ |
|
279 | 250 | def __init__(self): |
|
280 | 251 | IOError.__init__(self, 'reload failed, filename is not set.') |
|
281 | 252 | |
|
282 | 253 | |
|
283 | 254 | class DuplicateError(ConfigObjError): |
|
284 | 255 | """ |
|
285 | 256 | The keyword or section specified already exists. |
|
286 | 257 | """ |
|
287 | 258 | |
|
288 | 259 | |
|
289 | 260 | class ConfigspecError(ConfigObjError): |
|
290 | 261 | """ |
|
291 | 262 | An error occurred whilst parsing a configspec.
|
292 | 263 | """ |
|
293 | 264 | |
|
294 | 265 | |
|
295 | 266 | class InterpolationError(ConfigObjError): |
|
296 | 267 | """Base class for the two interpolation errors.""" |
|
297 | 268 | |
|
298 | 269 | |
|
299 | 270 | class InterpolationLoopError(InterpolationError): |
|
300 | 271 | """Maximum interpolation depth exceeded in string interpolation.""" |
|
301 | 272 | |
|
302 | 273 | def __init__(self, option): |
|
303 | 274 | InterpolationError.__init__( |
|
304 | 275 | self, |
|
305 | 276 | 'interpolation loop detected in value "%s".' % option) |
|
306 | 277 | |
|
307 | 278 | |
|
308 | 279 | class RepeatSectionError(ConfigObjError): |
|
309 | 280 | """ |
|
310 | 281 | This error indicates additional sections in a section with a |
|
311 | 282 | ``__many__`` (repeated) section. |
|
312 | 283 | """ |
|
313 | 284 | |
|
314 | 285 | |
|
315 | 286 | class MissingInterpolationOption(InterpolationError): |
|
316 | 287 | """A value specified for interpolation was missing.""" |
|
317 | 288 | |
|
318 | 289 | def __init__(self, option): |
|
319 | 290 | InterpolationError.__init__( |
|
320 | 291 | self, |
|
321 | 292 | 'missing option "%s" in interpolation.' % option) |
|
322 | 293 | |
|
323 | 294 | |
|
324 | 295 | class UnreprError(ConfigObjError): |
|
325 | 296 | """An error parsing in unrepr mode.""" |
|
326 | 297 | |
|
327 | 298 | |
|
328 | 299 | |
|
329 | 300 | class InterpolationEngine(object): |
|
330 | 301 | """ |
|
331 | 302 | A helper class to help perform string interpolation. |
|
332 | 303 | |
|
333 | 304 | This class is an abstract base class; its descendants perform |
|
334 | 305 | the actual work. |
|
335 | 306 | """ |
|
336 | 307 | |
|
337 | 308 | # compiled regexp to use in self.interpolate() |
|
338 | 309 | _KEYCRE = re.compile(r"%\(([^)]*)\)s") |
|
339 | 310 | |
|
340 | 311 | def __init__(self, section): |
|
341 | 312 | # the Section instance that "owns" this engine |
|
342 | 313 | self.section = section |
|
343 | 314 | |
|
344 | 315 | |
|
345 | 316 | def interpolate(self, key, value): |
|
346 | 317 | def recursive_interpolate(key, value, section, backtrail): |
|
347 | 318 | """The function that does the actual work. |
|
348 | 319 | |
|
349 | 320 | ``value``: the string we're trying to interpolate. |
|
350 | 321 | ``section``: the section in which that string was found |
|
351 | 322 | ``backtrail``: a dict to keep track of where we've been, |
|
352 | 323 | to detect and prevent infinite recursion loops |
|
353 | 324 | |
|
354 | 325 | This is similar to a depth-first-search algorithm. |
|
355 | 326 | """ |
|
356 | 327 | # Have we been here already? |
|
357 | 328 | if backtrail.has_key((key, section.name)): |
|
358 | 329 | # Yes - infinite loop detected |
|
359 | 330 | raise InterpolationLoopError(key) |
|
360 | 331 | # Place a marker on our backtrail so we won't come back here again |
|
361 | 332 | backtrail[(key, section.name)] = 1 |
|
362 | 333 | |
|
363 | 334 | # Now start the actual work |
|
364 | 335 | match = self._KEYCRE.search(value) |
|
365 | 336 | while match: |
|
366 | 337 | # The actual parsing of the match is implementation-dependent, |
|
367 | 338 | # so delegate to our helper function |
|
368 | 339 | k, v, s = self._parse_match(match) |
|
369 | 340 | if k is None: |
|
370 | 341 | # That's the signal that no further interpolation is needed |
|
371 | 342 | replacement = v |
|
372 | 343 | else: |
|
373 | 344 | # Further interpolation may be needed to obtain final value |
|
374 | 345 | replacement = recursive_interpolate(k, v, s, backtrail) |
|
375 | 346 | # Replace the matched string with its final value |
|
376 | 347 | start, end = match.span() |
|
377 | 348 | value = ''.join((value[:start], replacement, value[end:])) |
|
378 | 349 | new_search_start = start + len(replacement) |
|
379 | 350 | # Pick up the next interpolation key, if any, for next time |
|
380 | 351 | # through the while loop |
|
381 | 352 | match = self._KEYCRE.search(value, new_search_start) |
|
382 | 353 | |
|
383 | 354 | # Now safe to come back here again; remove marker from backtrail |
|
384 | 355 | del backtrail[(key, section.name)] |
|
385 | 356 | |
|
386 | 357 | return value |
|
387 | 358 | |
|
388 | 359 | # Back in interpolate(), all we have to do is kick off the recursive |
|
389 | 360 | # function with appropriate starting values |
|
390 | 361 | value = recursive_interpolate(key, value, self.section, {}) |
|
391 | 362 | return value |
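
A minimal sketch of the loop protection provided by ``backtrail``, assuming ``ConfigObj`` and ``InterpolationLoopError`` from this module are in scope: two values that refer to each other are caught rather than recursing forever.

    # Hedged sketch; mutually referencing values trip the backtrail check.
    c = ConfigObj(['a = %(b)s', 'b = %(a)s'])
    try:
        c['a']
    except InterpolationLoopError:
        pass                        # raised once ('a', section) is seen twice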
|
392 | 363 | |
|
393 | 364 | |
|
394 | 365 | def _fetch(self, key): |
|
395 | 366 | """Helper function to fetch values from owning section. |
|
396 | 367 | |
|
397 | 368 | Returns a 2-tuple: the value, and the section where it was found. |
|
398 | 369 | """ |
|
399 | 370 | # switch off interpolation before we try and fetch anything ! |
|
400 | 371 | save_interp = self.section.main.interpolation |
|
401 | 372 | self.section.main.interpolation = False |
|
402 | 373 | |
|
403 | 374 | # Start at section that "owns" this InterpolationEngine |
|
404 | 375 | current_section = self.section |
|
405 | 376 | while True: |
|
406 | 377 | # try the current section first |
|
407 | 378 | val = current_section.get(key) |
|
408 | 379 | if val is not None: |
|
409 | 380 | break |
|
410 | 381 | # try "DEFAULT" next |
|
411 | 382 | val = current_section.get('DEFAULT', {}).get(key) |
|
412 | 383 | if val is not None: |
|
413 | 384 | break |
|
414 | 385 | # move up to parent and try again |
|
415 | 386 | # top-level's parent is itself |
|
416 | 387 | if current_section.parent is current_section: |
|
417 | 388 | # reached top level, time to give up |
|
418 | 389 | break |
|
419 | 390 | current_section = current_section.parent |
|
420 | 391 | |
|
421 | 392 | # restore interpolation to previous value before returning |
|
422 | 393 | self.section.main.interpolation = save_interp |
|
423 | 394 | if val is None: |
|
424 | 395 | raise MissingInterpolationOption(key) |
|
425 | 396 | return val, current_section |
|
426 | 397 | |
|
427 | 398 | |
|
428 | 399 | def _parse_match(self, match): |
|
429 | 400 | """Implementation-dependent helper function. |
|
430 | 401 | |
|
431 | 402 | Will be passed a match object corresponding to the interpolation |
|
432 | 403 | key we just found (e.g., "%(foo)s" or "$foo"). Should look up that |
|
433 | 404 | key in the appropriate config file section (using the ``_fetch()`` |
|
434 | 405 | helper function) and return a 3-tuple: (key, value, section) |
|
435 | 406 | |
|
436 | 407 | ``key`` is the name of the key we're looking for |
|
437 | 408 | ``value`` is the value found for that key |
|
438 | 409 | ``section`` is a reference to the section where it was found |
|
439 | 410 | |
|
440 | 411 | ``key`` and ``section`` should be None if no further |
|
441 | 412 | interpolation should be performed on the resulting value |
|
442 | 413 | (e.g., if we interpolated "$$" and returned "$"). |
|
443 | 414 | """ |
|
444 | 415 | raise NotImplementedError() |
|
445 | 416 | |
|
446 | 417 | |
|
447 | 418 | |
|
448 | 419 | class ConfigParserInterpolation(InterpolationEngine): |
|
449 | 420 | """Behaves like ConfigParser.""" |
|
450 | 421 | _KEYCRE = re.compile(r"%\(([^)]*)\)s") |
|
451 | 422 | |
|
452 | 423 | def _parse_match(self, match): |
|
453 | 424 | key = match.group(1) |
|
454 | 425 | value, section = self._fetch(key) |
|
455 | 426 | return key, value, section |
|
456 | 427 | |
|
457 | 428 | |
|
458 | 429 | |
|
459 | 430 | class TemplateInterpolation(InterpolationEngine): |
|
460 | 431 | """Behaves like string.Template.""" |
|
461 | 432 | _delimiter = '$' |
|
462 | 433 | _KEYCRE = re.compile(r""" |
|
463 | 434 | \$(?: |
|
464 | 435 | (?P<escaped>\$) | # Two $ signs |
|
465 | 436 | (?P<named>[_a-z][_a-z0-9]*) | # $name format |
|
466 | 437 | {(?P<braced>[^}]*)} # ${name} format |
|
467 | 438 | ) |
|
468 | 439 | """, re.IGNORECASE | re.VERBOSE) |
|
469 | 440 | |
|
470 | 441 | def _parse_match(self, match): |
|
471 | 442 | # Valid name (in or out of braces): fetch value from section |
|
472 | 443 | key = match.group('named') or match.group('braced') |
|
473 | 444 | if key is not None: |
|
474 | 445 | value, section = self._fetch(key) |
|
475 | 446 | return key, value, section |
|
476 | 447 | # Escaped delimiter (e.g., $$): return single delimiter |
|
477 | 448 | if match.group('escaped') is not None: |
|
478 | 449 | # Return None for key and section to indicate it's time to stop |
|
479 | 450 | return None, self._delimiter, None |
|
480 | 451 | # Anything else: ignore completely, just return it unchanged |
|
481 | 452 | return None, match.group(), None |
|
482 | 453 | |
|
483 | 454 | |
|
484 | 455 | interpolation_engines = { |
|
485 | 456 | 'configparser': ConfigParserInterpolation, |
|
486 | 457 | 'template': TemplateInterpolation, |
|
487 | 458 | } |
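
A minimal usage sketch of the two engines, assuming ``ConfigObj`` (defined later in this module) is available: the default ``configparser`` style uses ``%(name)s``, while ``template`` style uses ``$name``.

    # Hedged sketch of the two interpolation syntaxes.
    c = ConfigObj(['home = /data', 'logdir = %(home)s/logs'])
    assert c['logdir'] == '/data/logs'

    c2 = ConfigObj(['home = /data', 'logdir = $home/logs'],
                   interpolation='template')
    assert c2['logdir'] == '/data/logs'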
|
488 | 459 | |
|
489 | 460 | |
|
490 | 461 | |
|
491 | 462 | class Section(dict): |
|
492 | 463 | """ |
|
493 | 464 | A dictionary-like object that represents a section in a config file. |
|
494 | 465 | |
|
495 | 466 | It does string interpolation if the 'interpolation' attribute |
|
496 | 467 | of the 'main' object is set to True. |
|
497 | 468 | |
|
498 | 469 | Interpolation is tried first from this object, then from the 'DEFAULT' |
|
499 | 470 | section of this object, next from the parent and its 'DEFAULT' section, |
|
500 | 471 | and so on until the main object is reached. |
|
501 | 472 | |
|
502 | 473 | A Section will behave like an ordered dictionary - following the |
|
503 | 474 | order of the ``scalars`` and ``sections`` attributes. |
|
504 | 475 | You can use this to change the order of members. |
|
505 | 476 | |
|
506 | 477 | Iteration follows the order: scalars, then sections. |
|
507 | 478 | """ |
|
508 | 479 | |
|
509 | 480 | def __init__(self, parent, depth, main, indict=None, name=None): |
|
510 | 481 | """ |
|
511 | 482 | * parent is the section above |
|
512 | 483 | * depth is the depth level of this section |
|
513 | 484 | * main is the main ConfigObj |
|
514 | 485 | * indict is a dictionary to initialise the section with |
|
515 | 486 | """ |
|
516 | 487 | if indict is None: |
|
517 | 488 | indict = {} |
|
518 | 489 | dict.__init__(self) |
|
519 | 490 | # used for nesting level *and* interpolation |
|
520 | 491 | self.parent = parent |
|
521 | 492 | # used for the interpolation attribute |
|
522 | 493 | self.main = main |
|
523 | 494 | # level of nesting depth of this Section |
|
524 | 495 | self.depth = depth |
|
525 | 496 | # purely for information |
|
526 | 497 | self.name = name |
|
527 | 498 | # |
|
528 | 499 | self._initialise() |
|
529 | 500 | # we do this explicitly so that __setitem__ is used properly |
|
530 | 501 | # (rather than just passing to ``dict.__init__``) |
|
531 | 502 | for entry, value in indict.iteritems(): |
|
532 | 503 | self[entry] = value |
|
533 | 504 | |
|
534 | 505 | |
|
535 | 506 | def _initialise(self): |
|
536 | 507 | # the sequence of scalar values in this Section |
|
537 | 508 | self.scalars = [] |
|
538 | 509 | # the sequence of sections in this Section |
|
539 | 510 | self.sections = [] |
|
540 | 511 | # for comments :-) |
|
541 | 512 | self.comments = {} |
|
542 | 513 | self.inline_comments = {} |
|
543 | 514 | # for the configspec |
|
544 | 515 | self.configspec = {} |
|
545 | 516 | self._order = [] |
|
546 | 517 | self._configspec_comments = {} |
|
547 | 518 | self._configspec_inline_comments = {} |
|
548 | 519 | self._cs_section_comments = {} |
|
549 | 520 | self._cs_section_inline_comments = {} |
|
550 | 521 | # for defaults |
|
551 | 522 | self.defaults = [] |
|
552 | 523 | self.default_values = {} |
|
553 | 524 | |
|
554 | 525 | |
|
555 | 526 | def _interpolate(self, key, value): |
|
556 | 527 | try: |
|
557 | 528 | # do we already have an interpolation engine? |
|
558 | 529 | engine = self._interpolation_engine |
|
559 | 530 | except AttributeError: |
|
560 | 531 | # not yet: first time running _interpolate(), so pick the engine |
|
561 | 532 | name = self.main.interpolation |
|
562 | 533 | if name == True: # note that "if name:" would be incorrect here |
|
563 | 534 | # backwards-compatibility: interpolation=True means use default |
|
564 | 535 | name = DEFAULT_INTERPOLATION |
|
565 | 536 | name = name.lower() # so that "Template", "template", etc. all work |
|
566 | 537 | class_ = interpolation_engines.get(name, None) |
|
567 | 538 | if class_ is None: |
|
568 | 539 | # invalid value for self.main.interpolation |
|
569 | 540 | self.main.interpolation = False |
|
570 | 541 | return value |
|
571 | 542 | else: |
|
572 | 543 | # save reference to engine so we don't have to do this again |
|
573 | 544 | engine = self._interpolation_engine = class_(self) |
|
574 | 545 | # let the engine do the actual work |
|
575 | 546 | return engine.interpolate(key, value) |
|
576 | 547 | |
|
577 | 548 | |
|
578 | 549 | def __getitem__(self, key): |
|
579 | 550 | """Fetch the item and do string interpolation.""" |
|
580 | 551 | val = dict.__getitem__(self, key) |
|
581 | 552 | if self.main.interpolation and isinstance(val, StringTypes): |
|
582 | 553 | return self._interpolate(key, val) |
|
583 | 554 | return val |
|
584 | 555 | |
|
585 | 556 | |
|
586 | 557 | def __setitem__(self, key, value, unrepr=False): |
|
587 | 558 | """ |
|
588 | 559 | Correctly set a value. |
|
589 | 560 | |
|
590 | 561 | Making dictionary values Section instances. |
|
591 | 562 | (We have to special case 'Section' instances - which are also dicts) |
|
592 | 563 | |
|
593 | 564 | Keys must be strings. |
|
594 | 565 | Values need only be strings (or lists of strings) if |
|
595 | 566 | ``main.stringify`` is set. |
|
596 | 567 | |
|
597 | 568 | ``unrepr`` must be set when setting a value to a dictionary, without
|
598 | 569 | creating a new sub-section. |
|
599 | 570 | """ |
|
600 | 571 | if not isinstance(key, StringTypes): |
|
601 | 572 | raise ValueError('The key "%s" is not a string.' % key) |
|
602 | 573 | |
|
603 | 574 | # add the comment |
|
604 | 575 | if not self.comments.has_key(key): |
|
605 | 576 | self.comments[key] = [] |
|
606 | 577 | self.inline_comments[key] = '' |
|
607 | 578 | # remove the entry from defaults |
|
608 | 579 | if key in self.defaults: |
|
609 | 580 | self.defaults.remove(key) |
|
610 | 581 | # |
|
611 | 582 | if isinstance(value, Section): |
|
612 | 583 | if not self.has_key(key): |
|
613 | 584 | self.sections.append(key) |
|
614 | 585 | dict.__setitem__(self, key, value) |
|
615 | 586 | elif isinstance(value, dict) and not unrepr: |
|
616 | 587 | # First create the new depth level, |
|
617 | 588 | # then create the section |
|
618 | 589 | if not self.has_key(key): |
|
619 | 590 | self.sections.append(key) |
|
620 | 591 | new_depth = self.depth + 1 |
|
621 | 592 | dict.__setitem__( |
|
622 | 593 | self, |
|
623 | 594 | key, |
|
624 | 595 | Section( |
|
625 | 596 | self, |
|
626 | 597 | new_depth, |
|
627 | 598 | self.main, |
|
628 | 599 | indict=value, |
|
629 | 600 | name=key)) |
|
630 | 601 | else: |
|
631 | 602 | if not self.has_key(key): |
|
632 | 603 | self.scalars.append(key) |
|
633 | 604 | if not self.main.stringify: |
|
634 | 605 | if isinstance(value, StringTypes): |
|
635 | 606 | pass |
|
636 | 607 | elif isinstance(value, (list, tuple)): |
|
637 | 608 | for entry in value: |
|
638 | 609 | if not isinstance(entry, StringTypes): |
|
639 | 610 | raise TypeError('Value is not a string "%s".' % entry) |
|
640 | 611 | else: |
|
641 | 612 | raise TypeError('Value is not a string "%s".' % value) |
|
642 | 613 | dict.__setitem__(self, key, value) |
|
643 | 614 | |
|
644 | 615 | |
|
645 | 616 | def __delitem__(self, key): |
|
646 | 617 | """Remove items from the sequence when deleting.""" |
|
647 | 618 | dict.__delitem__(self, key)
|
648 | 619 | if key in self.scalars: |
|
649 | 620 | self.scalars.remove(key) |
|
650 | 621 | else: |
|
651 | 622 | self.sections.remove(key) |
|
652 | 623 | del self.comments[key] |
|
653 | 624 | del self.inline_comments[key] |
|
654 | 625 | |
|
655 | 626 | |
|
656 | 627 | def get(self, key, default=None): |
|
657 | 628 | """A version of ``get`` that doesn't bypass string interpolation.""" |
|
658 | 629 | try: |
|
659 | 630 | return self[key] |
|
660 | 631 | except KeyError: |
|
661 | 632 | return default |
|
662 | 633 | |
|
663 | 634 | |
|
664 | 635 | def update(self, indict): |
|
665 | 636 | """ |
|
666 | 637 | A version of update that uses our ``__setitem__``. |
|
667 | 638 | """ |
|
668 | 639 | for entry in indict: |
|
669 | 640 | self[entry] = indict[entry] |
|
670 | 641 | |
|
671 | 642 | |
|
672 | 643 | def pop(self, key, *args): |
|
673 | 644 | """ |
|
674 | 645 | 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value. |
|
675 | 646 | If key is not found, d is returned if given, otherwise KeyError is raised' |
|
676 | 647 | """ |
|
677 | 648 | val = dict.pop(self, key, *args) |
|
678 | 649 | if key in self.scalars: |
|
679 | 650 | del self.comments[key] |
|
680 | 651 | del self.inline_comments[key] |
|
681 | 652 | self.scalars.remove(key) |
|
682 | 653 | elif key in self.sections: |
|
683 | 654 | del self.comments[key] |
|
684 | 655 | del self.inline_comments[key] |
|
685 | 656 | self.sections.remove(key) |
|
686 | 657 | if self.main.interpolation and isinstance(val, StringTypes): |
|
687 | 658 | return self._interpolate(key, val) |
|
688 | 659 | return val |
|
689 | 660 | |
|
690 | 661 | |
|
691 | 662 | def popitem(self): |
|
692 | 663 | """Pops the first (key,val)""" |
|
693 | 664 | sequence = (self.scalars + self.sections) |
|
694 | 665 | if not sequence: |
|
695 | 666 | raise KeyError(": 'popitem(): dictionary is empty'") |
|
696 | 667 | key = sequence[0] |
|
697 | 668 | val = self[key] |
|
698 | 669 | del self[key] |
|
699 | 670 | return key, val |
|
700 | 671 | |
|
701 | 672 | |
|
702 | 673 | def clear(self): |
|
703 | 674 | """ |
|
704 | 675 | A version of clear that also affects scalars/sections.
|
705 | 676 | Also clears comments and configspec. |
|
706 | 677 | |
|
707 | 678 | Leaves other attributes alone:
|
708 | 679 | depth/main/parent are not affected |
|
709 | 680 | """ |
|
710 | 681 | dict.clear(self) |
|
711 | 682 | self.scalars = [] |
|
712 | 683 | self.sections = [] |
|
713 | 684 | self.comments = {} |
|
714 | 685 | self.inline_comments = {} |
|
715 | 686 | self.configspec = {} |
|
716 | 687 | |
|
717 | 688 | |
|
718 | 689 | def setdefault(self, key, default=None): |
|
719 | 690 | """A version of setdefault that sets sequence if appropriate.""" |
|
720 | 691 | try: |
|
721 | 692 | return self[key] |
|
722 | 693 | except KeyError: |
|
723 | 694 | self[key] = default |
|
724 | 695 | return self[key] |
|
725 | 696 | |
|
726 | 697 | |
|
727 | 698 | def items(self): |
|
728 | 699 | """D.items() -> list of D's (key, value) pairs, as 2-tuples""" |
|
729 | 700 | return zip((self.scalars + self.sections), self.values()) |
|
730 | 701 | |
|
731 | 702 | |
|
732 | 703 | def keys(self): |
|
733 | 704 | """D.keys() -> list of D's keys""" |
|
734 | 705 | return (self.scalars + self.sections) |
|
735 | 706 | |
|
736 | 707 | |
|
737 | 708 | def values(self): |
|
738 | 709 | """D.values() -> list of D's values""" |
|
739 | 710 | return [self[key] for key in (self.scalars + self.sections)] |
|
740 | 711 | |
|
741 | 712 | |
|
742 | 713 | def iteritems(self): |
|
743 | 714 | """D.iteritems() -> an iterator over the (key, value) items of D""" |
|
744 | 715 | return iter(self.items()) |
|
745 | 716 | |
|
746 | 717 | |
|
747 | 718 | def iterkeys(self): |
|
748 | 719 | """D.iterkeys() -> an iterator over the keys of D""" |
|
749 | 720 | return iter((self.scalars + self.sections)) |
|
750 | 721 | |
|
751 | 722 | __iter__ = iterkeys |
|
752 | 723 | |
|
753 | 724 | |
|
754 | 725 | def itervalues(self): |
|
755 | 726 | """D.itervalues() -> an iterator over the values of D""" |
|
756 | 727 | return iter(self.values()) |
|
757 | 728 | |
|
758 | 729 | |
|
759 | 730 | def __repr__(self): |
|
760 | 731 | """x.__repr__() <==> repr(x)""" |
|
761 | 732 | return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(self[key]))) |
|
762 | 733 | for key in (self.scalars + self.sections)]) |
|
763 | 734 | |
|
764 | 735 | __str__ = __repr__ |
|
765 | 736 | __str__.__doc__ = "x.__str__() <==> str(x)" |
|
766 | 737 | |
|
767 | 738 | |
|
768 | 739 | # Extra methods - not in a normal dictionary |
|
769 | 740 | |
|
770 | 741 | def dict(self): |
|
771 | 742 | """ |
|
772 | 743 | Return a deepcopy of self as a dictionary. |
|
773 | 744 | |
|
774 | 745 | All members that are ``Section`` instances are recursively turned to |
|
775 | 746 | ordinary dictionaries - by calling their ``dict`` method. |
|
776 | 747 | |
|
777 | 748 | >>> n = a.dict() |
|
778 | 749 | >>> n == a |
|
779 | 750 | 1 |
|
780 | 751 | >>> n is a |
|
781 | 752 | 0 |
|
782 | 753 | """ |
|
783 | 754 | newdict = {} |
|
784 | 755 | for entry in self: |
|
785 | 756 | this_entry = self[entry] |
|
786 | 757 | if isinstance(this_entry, Section): |
|
787 | 758 | this_entry = this_entry.dict() |
|
788 | 759 | elif isinstance(this_entry, list): |
|
789 | 760 | # create a copy rather than a reference |
|
790 | 761 | this_entry = list(this_entry) |
|
791 | 762 | elif isinstance(this_entry, tuple): |
|
792 | 763 | # create a copy rather than a reference |
|
793 | 764 | this_entry = tuple(this_entry) |
|
794 | 765 | newdict[entry] = this_entry |
|
795 | 766 | return newdict |
|
796 | 767 | |
|
797 | 768 | |
|
798 | 769 | def merge(self, indict): |
|
799 | 770 | """ |
|
800 | 771 | A recursive update - useful for merging config files. |
|
801 | 772 | |
|
802 | 773 | >>> a = '''[section1] |
|
803 | 774 | ... option1 = True |
|
804 | 775 | ... [[subsection]] |
|
805 | 776 | ... more_options = False |
|
806 | 777 | ... # end of file'''.splitlines() |
|
807 | 778 | >>> b = '''# File is user.ini |
|
808 | 779 | ... [section1] |
|
809 | 780 | ... option1 = False |
|
810 | 781 | ... # end of file'''.splitlines() |
|
811 | 782 | >>> c1 = ConfigObj(b) |
|
812 | 783 | >>> c2 = ConfigObj(a) |
|
813 | 784 | >>> c2.merge(c1) |
|
814 | 785 | >>> c2 |
|
815 | 786 | {'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}} |
|
816 | 787 | """ |
|
817 | for key, val in indict.items(): | |
|
788 | for key, val in indict.iteritems(): | |
|
818 | 789 | if (key in self and isinstance(self[key], dict) and |
|
819 | 790 | isinstance(val, dict)): |
|
820 | 791 | self[key].merge(val) |
|
821 | 792 | else: |
|
822 | 793 | self[key] = val |
|
823 | 794 | |
|
824 | 795 | |
|
825 | 796 | def rename(self, oldkey, newkey): |
|
826 | 797 | """ |
|
827 | 798 | Change a keyname to another, without changing position in sequence. |
|
828 | 799 | |
|
829 | 800 | Implemented so that transformations can be made on keys, |
|
830 | 801 | as well as on values. (used by encode and decode) |
|
831 | 802 | |
|
832 | 803 | Also renames comments. |
|
833 | 804 | """ |
|
834 | 805 | if oldkey in self.scalars: |
|
835 | 806 | the_list = self.scalars |
|
836 | 807 | elif oldkey in self.sections: |
|
837 | 808 | the_list = self.sections |
|
838 | 809 | else: |
|
839 | 810 | raise KeyError('Key "%s" not found.' % oldkey) |
|
840 | 811 | pos = the_list.index(oldkey) |
|
841 | 812 | # |
|
842 | 813 | val = self[oldkey] |
|
843 | 814 | dict.__delitem__(self, oldkey) |
|
844 | 815 | dict.__setitem__(self, newkey, val) |
|
845 | 816 | the_list.remove(oldkey) |
|
846 | 817 | the_list.insert(pos, newkey) |
|
847 | 818 | comm = self.comments[oldkey] |
|
848 | 819 | inline_comment = self.inline_comments[oldkey] |
|
849 | 820 | del self.comments[oldkey] |
|
850 | 821 | del self.inline_comments[oldkey] |
|
851 | 822 | self.comments[newkey] = comm |
|
852 | 823 | self.inline_comments[newkey] = inline_comment |
|
853 | 824 | |
|
854 | 825 | |
|
855 | 826 | def walk(self, function, raise_errors=True, |
|
856 | 827 | call_on_sections=False, **keywargs): |
|
857 | 828 | """ |
|
858 | 829 | Walk every member and call a function on the keyword and value. |
|
859 | 830 | |
|
860 | 831 | Return a dictionary of the return values |
|
861 | 832 | |
|
862 | 833 | If the function raises an exception, raise the error
|
863 | 834 | unless ``raise_errors=False``, in which case set the return value to |
|
864 | 835 | ``False``. |
|
865 | 836 | |
|
866 | 837 | Any unrecognised keyword arguments you pass to walk will be passed on
|
867 | 838 | to the function you pass in. |
|
868 | 839 | |
|
869 | 840 | Note: if ``call_on_sections`` is ``True`` then - on encountering a |
|
870 | 841 | subsection, *first* the function is called for the *whole* subsection, |
|
871 | 842 | and then recurses into it's members. This means your function must be |
|
872 | 843 | able to handle strings, dictionaries and lists. This allows you |
|
873 | 844 | to change the key of subsections as well as for ordinary members. The |
|
874 | 845 | return value when called on the whole subsection has to be discarded. |
|
875 | 846 | |
|
876 | 847 | See the encode and decode methods for examples, including functions. |
|
877 | 848 | |
|
878 | 849 | .. caution:: |
|
879 | 850 | |
|
880 | 851 | You can use ``walk`` to transform the names of members of a section |
|
881 | 852 | but you mustn't add or delete members. |
|
882 | 853 | |
|
883 | 854 | >>> config = '''[XXXXsection] |
|
884 | 855 | ... XXXXkey = XXXXvalue'''.splitlines() |
|
885 | 856 | >>> cfg = ConfigObj(config) |
|
886 | 857 | >>> cfg |
|
887 | 858 | {'XXXXsection': {'XXXXkey': 'XXXXvalue'}} |
|
888 | 859 | >>> def transform(section, key): |
|
889 | 860 | ... val = section[key] |
|
890 | 861 | ... newkey = key.replace('XXXX', 'CLIENT1') |
|
891 | 862 | ... section.rename(key, newkey) |
|
892 | 863 | ... if isinstance(val, (tuple, list, dict)): |
|
893 | 864 | ... pass |
|
894 | 865 | ... else: |
|
895 | 866 | ... val = val.replace('XXXX', 'CLIENT1') |
|
896 | 867 | ... section[newkey] = val |
|
897 | 868 | >>> cfg.walk(transform, call_on_sections=True) |
|
898 | 869 | {'CLIENT1section': {'CLIENT1key': None}} |
|
899 | 870 | >>> cfg |
|
900 | 871 | {'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}} |
|
901 | 872 | """ |
|
902 | 873 | out = {} |
|
903 | 874 | # scalars first |
|
904 | 875 | for i in range(len(self.scalars)): |
|
905 | 876 | entry = self.scalars[i] |
|
906 | 877 | try: |
|
907 | 878 | val = function(self, entry, **keywargs) |
|
908 | 879 | # bound again in case name has changed |
|
909 | 880 | entry = self.scalars[i] |
|
910 | 881 | out[entry] = val |
|
911 | 882 | except Exception: |
|
912 | 883 | if raise_errors: |
|
913 | 884 | raise |
|
914 | 885 | else: |
|
915 | 886 | entry = self.scalars[i] |
|
916 | 887 | out[entry] = False |
|
917 | 888 | # then sections |
|
918 | 889 | for i in range(len(self.sections)): |
|
919 | 890 | entry = self.sections[i] |
|
920 | 891 | if call_on_sections: |
|
921 | 892 | try: |
|
922 | 893 | function(self, entry, **keywargs) |
|
923 | 894 | except Exception: |
|
924 | 895 | if raise_errors: |
|
925 | 896 | raise |
|
926 | 897 | else: |
|
927 | 898 | entry = self.sections[i] |
|
928 | 899 | out[entry] = False |
|
929 | 900 | # bound again in case name has changed |
|
930 | 901 | entry = self.sections[i] |
|
931 | 902 | # previous result is discarded |
|
932 | 903 | out[entry] = self[entry].walk( |
|
933 | 904 | function, |
|
934 | 905 | raise_errors=raise_errors, |
|
935 | 906 | call_on_sections=call_on_sections, |
|
936 | 907 | **keywargs) |
|
937 | 908 | return out |
|
938 | 909 | |
|
939 | 910 | |
|
940 | 911 | def decode(self, encoding): |
|
941 | 912 | """ |
|
942 | 913 | Decode all strings and values to unicode, using the specified encoding. |
|
943 | 914 | |
|
944 | 915 | Works with subsections and list values. |
|
945 | 916 | |
|
946 | 917 | Uses the ``walk`` method. |
|
947 | 918 | |
|
948 | 919 | Testing ``encode`` and ``decode``. |
|
949 | 920 | >>> m = ConfigObj(a) |
|
950 | 921 | >>> m.decode('ascii') |
|
951 | 922 | >>> def testuni(val): |
|
952 | 923 | ... for entry in val: |
|
953 | 924 | ... if not isinstance(entry, unicode): |
|
954 | 925 | ... print >> sys.stderr, type(entry) |
|
955 | 926 | ... raise AssertionError, 'decode failed.' |
|
956 | 927 | ... if isinstance(val[entry], dict): |
|
957 | 928 | ... testuni(val[entry]) |
|
958 | 929 | ... elif not isinstance(val[entry], unicode): |
|
959 | 930 | ... raise AssertionError, 'decode failed.' |
|
960 | 931 | >>> testuni(m) |
|
961 | 932 | >>> m.encode('ascii') |
|
962 | 933 | >>> a == m |
|
963 | 934 | 1 |
|
964 | 935 | """ |
|
965 | 936 | warn('use of ``decode`` is deprecated.', DeprecationWarning) |
|
966 | 937 | def decode(section, key, encoding=encoding, warn=True): |
|
967 | 938 | """ """ |
|
968 | 939 | val = section[key] |
|
969 | 940 | if isinstance(val, (list, tuple)): |
|
970 | 941 | newval = [] |
|
971 | 942 | for entry in val: |
|
972 | 943 | newval.append(entry.decode(encoding)) |
|
973 | 944 | elif isinstance(val, dict): |
|
974 | 945 | newval = val |
|
975 | 946 | else: |
|
976 | 947 | newval = val.decode(encoding) |
|
977 | 948 | newkey = key.decode(encoding) |
|
978 | 949 | section.rename(key, newkey) |
|
979 | 950 | section[newkey] = newval |
|
980 | 951 | # using ``call_on_sections`` allows us to modify section names |
|
981 | 952 | self.walk(decode, call_on_sections=True) |
|
982 | 953 | |
|
983 | 954 | |
|
984 | 955 | def encode(self, encoding): |
|
985 | 956 | """ |
|
986 | 957 | Encode all strings and values from unicode, |
|
987 | 958 | using the specified encoding. |
|
988 | 959 | |
|
989 | 960 | Works with subsections and list values. |
|
990 | 961 | Uses the ``walk`` method. |
|
991 | 962 | """ |
|
992 | 963 | warn('use of ``encode`` is deprecated.', DeprecationWarning) |
|
993 | 964 | def encode(section, key, encoding=encoding): |
|
994 | 965 | """ """ |
|
995 | 966 | val = section[key] |
|
996 | 967 | if isinstance(val, (list, tuple)): |
|
997 | 968 | newval = [] |
|
998 | 969 | for entry in val: |
|
999 | 970 | newval.append(entry.encode(encoding)) |
|
1000 | 971 | elif isinstance(val, dict): |
|
1001 | 972 | newval = val |
|
1002 | 973 | else: |
|
1003 | 974 | newval = val.encode(encoding) |
|
1004 | 975 | newkey = key.encode(encoding) |
|
1005 | 976 | section.rename(key, newkey) |
|
1006 | 977 | section[newkey] = newval |
|
1007 | 978 | self.walk(encode, call_on_sections=True) |
|
1008 | 979 | |
|
1009 | 980 | |
|
1010 | 981 | def istrue(self, key): |
|
1011 | 982 | """A deprecated version of ``as_bool``.""" |
|
1012 | 983 | warn('use of ``istrue`` is deprecated. Use ``as_bool`` method ' |
|
1013 | 984 | 'instead.', DeprecationWarning) |
|
1014 | 985 | return self.as_bool(key) |
|
1015 | 986 | |
|
1016 | 987 | |
|
1017 | 988 | def as_bool(self, key): |
|
1018 | 989 | """ |
|
1019 | 990 | Accepts a key as input. The corresponding value must be a string or |
|
1020 | 991 | the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to |
|
1021 | 992 | retain compatibility with Python 2.2. |
|
1022 | 993 | |
|
1023 | 994 | If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns |
|
1024 | 995 | ``True``. |
|
1025 | 996 | |
|
1026 | 997 | If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns |
|
1027 | 998 | ``False``. |
|
1028 | 999 | |
|
1029 | 1000 | ``as_bool`` is not case sensitive. |
|
1030 | 1001 | |
|
1031 | 1002 | Any other input will raise a ``ValueError``. |
|
1032 | 1003 | |
|
1033 | 1004 | >>> a = ConfigObj() |
|
1034 | 1005 | >>> a['a'] = 'fish' |
|
1035 | 1006 | >>> a.as_bool('a') |
|
1036 | 1007 | Traceback (most recent call last): |
|
1037 | 1008 | ValueError: Value "fish" is neither True nor False |
|
1038 | 1009 | >>> a['b'] = 'True' |
|
1039 | 1010 | >>> a.as_bool('b') |
|
1040 | 1011 | 1 |
|
1041 | 1012 | >>> a['b'] = 'off' |
|
1042 | 1013 | >>> a.as_bool('b') |
|
1043 | 1014 | 0 |
|
1044 | 1015 | """ |
|
1045 | 1016 | val = self[key] |
|
1046 | 1017 | if val == True: |
|
1047 | 1018 | return True |
|
1048 | 1019 | elif val == False: |
|
1049 | 1020 | return False |
|
1050 | 1021 | else: |
|
1051 | 1022 | try: |
|
1052 | 1023 | if not isinstance(val, StringTypes): |
|
1053 | 1024 | # TODO: Why do we raise a KeyError here? |
|
1054 | 1025 | raise KeyError() |
|
1055 | 1026 | else: |
|
1056 | 1027 | return self.main._bools[val.lower()] |
|
1057 | 1028 | except KeyError: |
|
1058 | 1029 | raise ValueError('Value "%s" is neither True nor False' % val) |
|
1059 | 1030 | |
|
1060 | 1031 | |
|
1061 | 1032 | def as_int(self, key): |
|
1062 | 1033 | """ |
|
1063 | 1034 | A convenience method which coerces the specified value to an integer. |
|
1064 | 1035 | |
|
1065 | 1036 | If the value is an invalid literal for ``int``, a ``ValueError`` will |
|
1066 | 1037 | be raised. |
|
1067 | 1038 | |
|
1068 | 1039 | >>> a = ConfigObj() |
|
1069 | 1040 | >>> a['a'] = 'fish' |
|
1070 | 1041 | >>> a.as_int('a') |
|
1071 | 1042 | Traceback (most recent call last): |
|
1072 | 1043 | ValueError: invalid literal for int(): fish |
|
1073 | 1044 | >>> a['b'] = '1' |
|
1074 | 1045 | >>> a.as_int('b') |
|
1075 | 1046 | 1 |
|
1076 | 1047 | >>> a['b'] = '3.2' |
|
1077 | 1048 | >>> a.as_int('b') |
|
1078 | 1049 | Traceback (most recent call last): |
|
1079 | 1050 | ValueError: invalid literal for int(): 3.2 |
|
1080 | 1051 | """ |
|
1081 | 1052 | return int(self[key]) |
|
1082 | 1053 | |
|
1083 | 1054 | |
|
1084 | 1055 | def as_float(self, key): |
|
1085 | 1056 | """ |
|
1086 | 1057 | A convenience method which coerces the specified value to a float. |
|
1087 | 1058 | |
|
1088 | 1059 | If the value is an invalid literal for ``float``, a ``ValueError`` will |
|
1089 | 1060 | be raised. |
|
1090 | 1061 | |
|
1091 | 1062 | >>> a = ConfigObj() |
|
1092 | 1063 | >>> a['a'] = 'fish' |
|
1093 | 1064 | >>> a.as_float('a') |
|
1094 | 1065 | Traceback (most recent call last): |
|
1095 | 1066 | ValueError: invalid literal for float(): fish |
|
1096 | 1067 | >>> a['b'] = '1' |
|
1097 | 1068 | >>> a.as_float('b') |
|
1098 | 1069 | 1.0 |
|
1099 | 1070 | >>> a['b'] = '3.2' |
|
1100 | 1071 | >>> a.as_float('b') |
|
1101 | 1072 | 3.2000000000000002 |
|
1102 | 1073 | """ |
|
1103 | 1074 | return float(self[key]) |
|
1104 | 1075 | |
|
1105 | 1076 | |
|
1106 | 1077 | def restore_default(self, key): |
|
1107 | 1078 | """ |
|
1108 | 1079 | Restore (and return) default value for the specified key. |
|
1109 | 1080 | |
|
1110 | 1081 | This method will only work for a ConfigObj that was created |
|
1111 | 1082 | with a configspec and has been validated. |
|
1112 | 1083 | |
|
1113 | 1084 | If there is no default value for this key, ``KeyError`` is raised. |
|
1114 | 1085 | """ |
|
1115 | 1086 | default = self.default_values[key] |
|
1116 | 1087 | dict.__setitem__(self, key, default) |
|
1117 | 1088 | if key not in self.defaults: |
|
1118 | 1089 | self.defaults.append(key) |
|
1119 | 1090 | return default |
|
1120 | 1091 | |
|
1121 | 1092 | |
|
1122 | 1093 | def restore_defaults(self): |
|
1123 | 1094 | """ |
|
1124 | 1095 | Recursively restore default values to all members |
|
1125 | 1096 | that have them. |
|
1126 | 1097 | |
|
1127 | 1098 | This method will only work for a ConfigObj that was created |
|
1128 | 1099 | with a configspec and has been validated. |
|
1129 | 1100 | |
|
1130 | 1101 | It doesn't delete or modify entries without default values. |
|
1131 | 1102 | """ |
|
1132 | 1103 | for key in self.default_values: |
|
1133 | 1104 | self.restore_default(key) |
|
1134 | 1105 | |
|
1135 | 1106 | for section in self.sections: |
|
1136 | 1107 | self[section].restore_defaults() |
|
1137 | 1108 | |
|
1138 | 1109 | |
|
1139 | 1110 | class ConfigObj(Section): |
|
1140 | 1111 | """An object to read, create, and write config files.""" |
|
1141 | 1112 | |
|
1142 | 1113 | _keyword = re.compile(r'''^ # line start |
|
1143 | 1114 | (\s*) # indentation |
|
1144 | 1115 | ( # keyword |
|
1145 | 1116 | (?:".*?")| # double quotes |
|
1146 | 1117 | (?:'.*?')| # single quotes |
|
1147 | 1118 | (?:[^'"=].*?) # no quotes |
|
1148 | 1119 | ) |
|
1149 | 1120 | \s*=\s* # divider |
|
1150 | 1121 | (.*) # value (including list values and comments) |
|
1151 | 1122 | $ # line end |
|
1152 | 1123 | ''', |
|
1153 | 1124 | re.VERBOSE) |
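
A minimal sketch of what this regex captures, assuming it is reachable as ``ConfigObj._keyword`` once the class body is complete: the indentation, the key, and the raw remainder of the line (value plus any inline comment). The sample line is illustrative only.

    # Hedged sketch of the groups produced by the _keyword regex.
    m = ConfigObj._keyword.match('  name = "a value"  # comment')
    indent, key, rest = m.groups()
    assert (indent, key) == ('  ', 'name')
    assert rest == '"a value"  # comment'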
|
1154 | 1125 | |
|
1155 | 1126 | _sectionmarker = re.compile(r'''^ |
|
1156 | 1127 | (\s*) # 1: indentation |
|
1157 | 1128 | ((?:\[\s*)+) # 2: section marker open |
|
1158 | 1129 | ( # 3: section name open |
|
1159 | 1130 | (?:"\s*\S.*?\s*")| # at least one non-space with double quotes |
|
1160 | 1131 | (?:'\s*\S.*?\s*')| # at least one non-space with single quotes |
|
1161 | 1132 | (?:[^'"\s].*?) # at least one non-space unquoted |
|
1162 | 1133 | ) # section name close |
|
1163 | 1134 | ((?:\s*\])+) # 4: section marker close |
|
1164 | 1135 | \s*(\#.*)? # 5: optional comment |
|
1165 | 1136 | $''', |
|
1166 | 1137 | re.VERBOSE) |
|
1167 | 1138 | |
|
1168 | 1139 | # this regexp pulls list values out as a single string |
|
1169 | 1140 | # or single values and comments |
|
1170 | 1141 | # FIXME: this regex adds a '' to the end of comma terminated lists |
|
1171 | 1142 | # workaround in ``_handle_value`` |
|
1172 | 1143 | _valueexp = re.compile(r'''^ |
|
1173 | 1144 | (?: |
|
1174 | 1145 | (?: |
|
1175 | 1146 | ( |
|
1176 | 1147 | (?: |
|
1177 | 1148 | (?: |
|
1178 | 1149 | (?:".*?")| # double quotes |
|
1179 | 1150 | (?:'.*?')| # single quotes |
|
1180 | 1151 | (?:[^'",\#][^,\#]*?) # unquoted |
|
1181 | 1152 | ) |
|
1182 | 1153 | \s*,\s* # comma |
|
1183 | 1154 | )* # match all list items ending in a comma (if any) |
|
1184 | 1155 | ) |
|
1185 | 1156 | ( |
|
1186 | 1157 | (?:".*?")| # double quotes |
|
1187 | 1158 | (?:'.*?')| # single quotes |
|
1188 | 1159 | (?:[^'",\#\s][^,]*?)| # unquoted |
|
1189 | 1160 | (?:(?<!,)) # Empty value |
|
1190 | 1161 | )? # last item in a list - or string value |
|
1191 | 1162 | )| |
|
1192 | 1163 | (,) # alternatively a single comma - empty list |
|
1193 | 1164 | ) |
|
1194 | 1165 | \s*(\#.*)? # optional comment |
|
1195 | 1166 | $''', |
|
1196 | 1167 | re.VERBOSE) |
|
1197 | 1168 | |
|
1198 | 1169 | # use findall to get the members of a list value |
|
1199 | 1170 | _listvalueexp = re.compile(r''' |
|
1200 | 1171 | ( |
|
1201 | 1172 | (?:".*?")| # double quotes |
|
1202 | 1173 | (?:'.*?')| # single quotes |
|
1203 | 1174 | (?:[^'",\#].*?) # unquoted |
|
1204 | 1175 | ) |
|
1205 | 1176 | \s*,\s* # comma |
|
1206 | 1177 | ''', |
|
1207 | 1178 | re.VERBOSE) |
|
1208 | 1179 | |
|
1209 | 1180 | # this regexp is used for the value |
|
1210 | 1181 | # when lists are switched off |
|
1211 | 1182 | _nolistvalue = re.compile(r'''^ |
|
1212 | 1183 | ( |
|
1213 | 1184 | (?:".*?")| # double quotes |
|
1214 | 1185 | (?:'.*?')| # single quotes |
|
1215 | 1186 | (?:[^'"\#].*?)| # unquoted |
|
1216 | 1187 | (?:) # Empty value |
|
1217 | 1188 | ) |
|
1218 | 1189 | \s*(\#.*)? # optional comment |
|
1219 | 1190 | $''', |
|
1220 | 1191 | re.VERBOSE) |
|
1221 | 1192 | |
|
1222 | 1193 | # regexes for finding triple quoted values on one line |
|
1223 | 1194 | _single_line_single = re.compile(r"^'''(.*?)'''\s*(#.*)?$") |
|
1224 | 1195 | _single_line_double = re.compile(r'^"""(.*?)"""\s*(#.*)?$') |
|
1225 | 1196 | _multi_line_single = re.compile(r"^(.*?)'''\s*(#.*)?$") |
|
1226 | 1197 | _multi_line_double = re.compile(r'^(.*?)"""\s*(#.*)?$') |
|
1227 | 1198 | |
|
1228 | 1199 | _triple_quote = { |
|
1229 | 1200 | "'''": (_single_line_single, _multi_line_single), |
|
1230 | 1201 | '"""': (_single_line_double, _multi_line_double), |
|
1231 | 1202 | } |
|
1232 | 1203 | |
|
1233 | 1204 | # Used by the ``istrue`` Section method |
|
1234 | 1205 | _bools = { |
|
1235 | 1206 | 'yes': True, 'no': False, |
|
1236 | 1207 | 'on': True, 'off': False, |
|
1237 | 1208 | '1': True, '0': False, |
|
1238 | 1209 | 'true': True, 'false': False, |
|
1239 | 1210 | } |
|
1240 | 1211 | |
|
1241 | 1212 | |
|
1242 | 1213 | def __init__(self, infile=None, options=None, **kwargs): |
|
1243 | 1214 | """ |
|
1244 | 1215 | Parse a config file or create a config file object. |
|
1245 | 1216 | |
|
1246 | 1217 | ``ConfigObj(infile=None, options=None, **kwargs)`` |
|
1247 | 1218 | """ |
|
1248 | 1219 | # init the superclass |
|
1249 | 1220 | Section.__init__(self, self, 0, self) |
|
1250 | 1221 | |
|
1251 | 1222 | if infile is None: |
|
1252 | 1223 | infile = [] |
|
1253 | 1224 | if options is None: |
|
1254 | 1225 | options = {} |
|
1255 | 1226 | else: |
|
1256 | 1227 | options = dict(options) |
|
1257 | 1228 | |
|
1258 | 1229 | # keyword arguments take precedence over an options dictionary |
|
1259 | 1230 | options.update(kwargs) |
|
1260 | 1231 | |
|
1261 | 1232 | defaults = OPTION_DEFAULTS.copy() |
|
1262 | 1233 | # TODO: check the values too. |
|
1263 | 1234 | for entry in options: |
|
1264 | 1235 | if entry not in defaults: |
|
1265 | 1236 | raise TypeError('Unrecognised option "%s".' % entry) |
|
1266 | 1237 | |
|
1267 | 1238 | # Add any explicit options to the defaults |
|
1268 | 1239 | defaults.update(options) |
|
1269 | 1240 | self._initialise(defaults) |
|
1270 | 1241 | configspec = defaults['configspec'] |
|
1271 | 1242 | self._original_configspec = configspec |
|
1272 | 1243 | self._load(infile, configspec) |
|
1273 | 1244 | |
|
1274 | 1245 | |
|
1275 | 1246 | def _load(self, infile, configspec): |
|
1276 | 1247 | if isinstance(infile, StringTypes): |
|
1277 | 1248 | self.filename = infile |
|
1278 | 1249 | if os.path.isfile(infile): |
|
1279 | 1250 | h = open(infile, 'rb') |
|
1280 | 1251 | infile = h.read() or [] |
|
1281 | 1252 | h.close() |
|
1282 | 1253 | elif self.file_error: |
|
1283 | 1254 | # raise an error if the file doesn't exist |
|
1284 | 1255 | raise IOError('Config file not found: "%s".' % self.filename) |
|
1285 | 1256 | else: |
|
1286 | 1257 | # file doesn't already exist |
|
1287 | 1258 | if self.create_empty: |
|
1288 | 1259 | # this is a good test that the filename specified |
|
1289 | 1260 | # isn't impossible - like on a non-existent device |
|
1290 | 1261 | h = open(infile, 'w') |
|
1291 | 1262 | h.write('') |
|
1292 | 1263 | h.close() |
|
1293 | 1264 | infile = [] |
|
1294 | 1265 | |
|
1295 | 1266 | elif isinstance(infile, (list, tuple)): |
|
1296 | 1267 | infile = list(infile) |
|
1297 | 1268 | |
|
1298 | 1269 | elif isinstance(infile, dict): |
|
1299 | 1270 | # initialise self |
|
1300 | 1271 | # the Section class handles creating subsections |
|
1301 | 1272 | if isinstance(infile, ConfigObj): |
|
1302 | 1273 | # get a copy of our ConfigObj |
|
1303 | 1274 | infile = infile.dict() |
|
1304 | 1275 | |
|
1305 | 1276 | for entry in infile: |
|
1306 | 1277 | self[entry] = infile[entry] |
|
1307 | 1278 | del self._errors |
|
1308 | 1279 | |
|
1309 | 1280 | if configspec is not None: |
|
1310 | 1281 | self._handle_configspec(configspec) |
|
1311 | 1282 | else: |
|
1312 | 1283 | self.configspec = None |
|
1313 | 1284 | return |
|
1314 | 1285 | |
|
1315 | 1286 | elif hasattr(infile, 'read'): |
|
1316 | 1287 | # This supports file like objects |
|
1317 | 1288 | infile = infile.read() or [] |
|
1318 | 1289 | # needs splitting into lines - but needs doing *after* decoding |
|
1319 | 1290 | # in case it's not an 8 bit encoding |
|
1320 | 1291 | else: |
|
1321 | 1292 | raise TypeError('infile must be a filename, file like object, or list of lines.') |
|
1322 | 1293 | |
|
1323 | 1294 | if infile: |
|
1324 | 1295 | # don't do it for the empty ConfigObj |
|
1325 | 1296 | infile = self._handle_bom(infile) |
|
1326 | 1297 | # infile is now *always* a list |
|
1327 | 1298 | # |
|
1328 | 1299 | # Set the newlines attribute (first line ending it finds) |
|
1329 | 1300 | # and strip trailing '\n' or '\r' from lines |
|
1330 | 1301 | for line in infile: |
|
1331 | 1302 | if (not line) or (line[-1] not in ('\r', '\n', '\r\n')): |
|
1332 | 1303 | continue |
|
1333 | 1304 | for end in ('\r\n', '\n', '\r'): |
|
1334 | 1305 | if line.endswith(end): |
|
1335 | 1306 | self.newlines = end |
|
1336 | 1307 | break |
|
1337 | 1308 | break |
|
1338 | 1309 | |
|
1339 | 1310 | infile = [line.rstrip('\r\n') for line in infile] |
|
1340 | 1311 | |
|
1341 | 1312 | self._parse(infile) |
|
1342 | 1313 | # if we had any errors, now is the time to raise them |
|
1343 | 1314 | if self._errors: |
|
1344 | 1315 | info = "at line %s." % self._errors[0].line_number |
|
1345 | 1316 | if len(self._errors) > 1: |
|
1346 | 1317 | msg = "Parsing failed with several errors.\nFirst error %s" % info |
|
1347 | 1318 | error = ConfigObjError(msg) |
|
1348 | 1319 | else: |
|
1349 | 1320 | error = self._errors[0] |
|
1350 | 1321 | # set the errors attribute; it's a list of tuples: |
|
1351 | 1322 | # (error_type, message, line_number) |
|
1352 | 1323 | error.errors = self._errors |
|
1353 | 1324 | # set the config attribute |
|
1354 | 1325 | error.config = self |
|
1355 | 1326 | raise error |
|
1356 | 1327 | # delete private attributes |
|
1357 | 1328 | del self._errors |
|
1358 | 1329 | |
|
1359 | 1330 | if configspec is None: |
|
1360 | 1331 | self.configspec = None |
|
1361 | 1332 | else: |
|
1362 | 1333 | self._handle_configspec(configspec) |
|
1363 | 1334 | |
|
1364 | 1335 | |
|
1365 | 1336 | def _initialise(self, options=None): |
|
1366 | 1337 | if options is None: |
|
1367 | 1338 | options = OPTION_DEFAULTS |
|
1368 | 1339 | |
|
1369 | 1340 | # initialise a few variables |
|
1370 | 1341 | self.filename = None |
|
1371 | 1342 | self._errors = [] |
|
1372 | 1343 | self.raise_errors = options['raise_errors'] |
|
1373 | 1344 | self.interpolation = options['interpolation'] |
|
1374 | 1345 | self.list_values = options['list_values'] |
|
1375 | 1346 | self.create_empty = options['create_empty'] |
|
1376 | 1347 | self.file_error = options['file_error'] |
|
1377 | 1348 | self.stringify = options['stringify'] |
|
1378 | 1349 | self.indent_type = options['indent_type'] |
|
1379 | 1350 | self.encoding = options['encoding'] |
|
1380 | 1351 | self.default_encoding = options['default_encoding'] |
|
1381 | 1352 | self.BOM = False |
|
1382 | 1353 | self.newlines = None |
|
1383 | 1354 | self.write_empty_values = options['write_empty_values'] |
|
1384 | 1355 | self.unrepr = options['unrepr'] |
|
1385 | 1356 | |
|
1386 | 1357 | self.initial_comment = [] |
|
1387 | 1358 | self.final_comment = [] |
|
1388 | 1359 | self.configspec = {} |
|
1389 | 1360 | |
|
1390 | 1361 | # Clear section attributes as well |
|
1391 | 1362 | Section._initialise(self) |
|
1392 | 1363 | |
|
1393 | 1364 | |
|
1394 | 1365 | def __repr__(self): |
|
1395 | 1366 | return ('ConfigObj({%s})' % |
|
1396 | 1367 | ', '.join([('%s: %s' % (repr(key), repr(self[key]))) |
|
1397 | 1368 | for key in (self.scalars + self.sections)])) |
|
1398 | 1369 | |
|
1399 | 1370 | |
|
1400 | 1371 | def _handle_bom(self, infile): |
|
1401 | 1372 | """ |
|
1402 | 1373 | Handle any BOM, and decode if necessary. |
|
1403 | 1374 | |
|
1404 | 1375 | If an encoding is specified, that *must* be used - but the BOM should |
|
1405 | 1376 | still be removed (and the BOM attribute set). |
|
1406 | 1377 | |
|
1407 | 1378 | (If the encoding is wrongly specified, then a BOM for an alternative |
|
1408 | 1379 | encoding won't be discovered or removed.) |
|
1409 | 1380 | |
|
1410 | 1381 | If an encoding is not specified, a UTF8 or UTF16 BOM will be detected and
|
1411 | 1382 | removed. The BOM attribute will be set. UTF16 will be decoded to |
|
1412 | 1383 | unicode. |
|
1413 | 1384 | |
|
1414 | 1385 | NOTE: This method must not be called with an empty ``infile``. |
|
1415 | 1386 | |
|
1416 | 1387 | Specifying the *wrong* encoding is likely to cause a |
|
1417 | 1388 | ``UnicodeDecodeError``. |
|
1418 | 1389 | |
|
1419 | 1390 | ``infile`` must always be returned as a list of lines, but may be |
|
1420 | 1391 | passed in as a single string. |
|
1421 | 1392 | """ |
|
1422 | 1393 | if ((self.encoding is not None) and |
|
1423 | 1394 | (self.encoding.lower() not in BOM_LIST)): |
|
1424 | 1395 | # No need to check for a BOM |
|
1425 | 1396 | # the encoding specified doesn't have one |
|
1426 | 1397 | # just decode |
|
1427 | 1398 | return self._decode(infile, self.encoding) |
|
1428 | 1399 | |
|
1429 | 1400 | if isinstance(infile, (list, tuple)): |
|
1430 | 1401 | line = infile[0] |
|
1431 | 1402 | else: |
|
1432 | 1403 | line = infile |
|
1433 | 1404 | if self.encoding is not None: |
|
1434 | 1405 | # encoding explicitly supplied |
|
1435 | 1406 | # And it could have an associated BOM |
|
1436 | 1407 | # TODO: if encoding is just UTF16 - we ought to check for both |
|
1437 | 1408 | # TODO: big endian and little endian versions. |
|
1438 | 1409 | enc = BOM_LIST[self.encoding.lower()] |
|
1439 | 1410 | if enc == 'utf_16': |
|
1440 | 1411 | # For UTF16 we try big endian and little endian |
|
1441 | for BOM, (encoding, final_encoding) in BOMS.items(): | |
|
1412 | for BOM, (encoding, final_encoding) in BOMS.iteritems(): | |
|
1442 | 1413 | if not final_encoding: |
|
1443 | 1414 | # skip UTF8 |
|
1444 | 1415 | continue |
|
1445 | 1416 | if infile.startswith(BOM): |
|
1446 | 1417 | ### BOM discovered |
|
1447 | 1418 | ##self.BOM = True |
|
1448 | 1419 | # Don't need to remove BOM |
|
1449 | 1420 | return self._decode(infile, encoding) |
|
1450 | 1421 | |
|
1451 | 1422 | # If we get this far, will *probably* raise a DecodeError |
|
1452 | 1423 | # As it doesn't appear to start with a BOM |
|
1453 | 1424 | return self._decode(infile, self.encoding) |
|
1454 | 1425 | |
|
1455 | 1426 | # Must be UTF8 |
|
1456 | 1427 | BOM = BOM_SET[enc] |
|
1457 | 1428 | if not line.startswith(BOM): |
|
1458 | 1429 | return self._decode(infile, self.encoding) |
|
1459 | 1430 | |
|
1460 | 1431 | newline = line[len(BOM):] |
|
1461 | 1432 | |
|
1462 | 1433 | # BOM removed |
|
1463 | 1434 | if isinstance(infile, (list, tuple)): |
|
1464 | 1435 | infile[0] = newline |
|
1465 | 1436 | else: |
|
1466 | 1437 | infile = newline |
|
1467 | 1438 | self.BOM = True |
|
1468 | 1439 | return self._decode(infile, self.encoding) |
|
1469 | 1440 | |
|
1470 | 1441 | # No encoding specified - so we need to check for UTF8/UTF16 |
|
1471 | for BOM, (encoding, final_encoding) in BOMS.items(): | |
|
1442 | for BOM, (encoding, final_encoding) in BOMS.iteritems(): | |
|
1472 | 1443 | if not line.startswith(BOM): |
|
1473 | 1444 | continue |
|
1474 | 1445 | else: |
|
1475 | 1446 | # BOM discovered |
|
1476 | 1447 | self.encoding = final_encoding |
|
1477 | 1448 | if not final_encoding: |
|
1478 | 1449 | self.BOM = True |
|
1479 | 1450 | # UTF8 |
|
1480 | 1451 | # remove BOM |
|
1481 | 1452 | newline = line[len(BOM):] |
|
1482 | 1453 | if isinstance(infile, (list, tuple)): |
|
1483 | 1454 | infile[0] = newline |
|
1484 | 1455 | else: |
|
1485 | 1456 | infile = newline |
|
1486 | 1457 | # UTF8 - don't decode |
|
1487 | 1458 | if isinstance(infile, StringTypes): |
|
1488 | 1459 | return infile.splitlines(True) |
|
1489 | 1460 | else: |
|
1490 | 1461 | return infile |
|
1491 | 1462 | # UTF16 - have to decode |
|
1492 | 1463 | return self._decode(infile, encoding) |
|
1493 | 1464 | |
|
1494 | 1465 | # No BOM discovered and no encoding specified, just return |
|
1495 | 1466 | if isinstance(infile, StringTypes): |
|
1496 | 1467 | # infile read from a file will be a single string |
|
1497 | 1468 | return infile.splitlines(True) |
|
1498 | 1469 | return infile |
|
1499 | 1470 | |
|
1500 | 1471 | |
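In short, the method above only has work to do when no encoding (or a UTF8/UTF16 encoding) was supplied: it sniffs the first line for a BOM, strips a UTF8 BOM in place, and decodes everything when a UTF16 BOM is found. A standalone sketch of the same check, built only on the codecs BOM constants (``sniff_bom`` is an illustrative helper, not part of configobj):

    from codecs import BOM_UTF8, BOM_UTF16_BE, BOM_UTF16_LE

    def sniff_bom(raw):
        # Mirror the checks above: UTF16 data must be decoded, UTF8 is only stripped.
        for bom, enc in ((BOM_UTF16_LE, 'utf_16_le'), (BOM_UTF16_BE, 'utf_16_be')):
            if raw.startswith(bom):
                return raw[len(bom):].decode(enc), enc
        if raw.startswith(BOM_UTF8):
            return raw[len(BOM_UTF8):], 'utf_8'
        return raw, None    # no BOM: leave the data alone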
|
1501 | 1472 | def _a_to_u(self, aString): |
|
1502 | 1473 | """Decode ASCII strings to unicode if a self.encoding is specified.""" |
|
1503 | 1474 | if self.encoding: |
|
1504 | 1475 | return aString.decode('ascii') |
|
1505 | 1476 | else: |
|
1506 | 1477 | return aString |
|
1507 | 1478 | |
|
1508 | 1479 | |
|
1509 | 1480 | def _decode(self, infile, encoding): |
|
1510 | 1481 | """ |
|
1511 | 1482 | Decode infile to unicode, using the specified encoding. |
|
1512 | 1483 | |
|
1513 | 1484 | If infile is a string, it also needs converting to a list. |
|
1514 | 1485 | """ |
|
1515 | 1486 | if isinstance(infile, StringTypes): |
|
1516 | 1487 | # can't be unicode |
|
1517 | 1488 | # NOTE: Could raise a ``UnicodeDecodeError`` |
|
1518 | 1489 | return infile.decode(encoding).splitlines(True) |
|
1519 | 1490 | for i, line in enumerate(infile): |
|
1520 | 1491 | if not isinstance(line, unicode): |
|
1521 | 1492 | # NOTE: The isinstance test here handles mixed lists of unicode/string |
|
1522 | 1493 | # NOTE: But the decode will break on any non-string values |
|
1523 | 1494 | # NOTE: Or could raise a ``UnicodeDecodeError`` |
|
1524 | 1495 | infile[i] = line.decode(encoding) |
|
1525 | 1496 | return infile |
|
1526 | 1497 | |
|
1527 | 1498 | |
|
1528 | 1499 | def _decode_element(self, line): |
|
1529 | 1500 | """Decode element to unicode if necessary.""" |
|
1530 | 1501 | if not self.encoding: |
|
1531 | 1502 | return line |
|
1532 | 1503 | if isinstance(line, str) and self.default_encoding: |
|
1533 | 1504 | return line.decode(self.default_encoding) |
|
1534 | 1505 | return line |
|
1535 | 1506 | |
|
1536 | 1507 | |
|
1537 | 1508 | def _str(self, value): |
|
1538 | 1509 | """ |
|
1539 | 1510 | Used by ``stringify`` within validate, to turn non-string values |
|
1540 | 1511 | into strings. |
|
1541 | 1512 | """ |
|
1542 | 1513 | if not isinstance(value, StringTypes): |
|
1543 | 1514 | return str(value) |
|
1544 | 1515 | else: |
|
1545 | 1516 | return value |
|
1546 | 1517 | |
|
1547 | 1518 | |
|
1548 | 1519 | def _parse(self, infile): |
|
1549 | 1520 | """Actually parse the config file.""" |
|
1550 | 1521 | temp_list_values = self.list_values |
|
1551 | 1522 | if self.unrepr: |
|
1552 | 1523 | self.list_values = False |
|
1553 | 1524 | |
|
1554 | 1525 | comment_list = [] |
|
1555 | 1526 | done_start = False |
|
1556 | 1527 | this_section = self |
|
1557 | 1528 | maxline = len(infile) - 1 |
|
1558 | 1529 | cur_index = -1 |
|
1559 | 1530 | reset_comment = False |
|
1560 | 1531 | |
|
1561 | 1532 | while cur_index < maxline: |
|
1562 | 1533 | if reset_comment: |
|
1563 | 1534 | comment_list = [] |
|
1564 | 1535 | cur_index += 1 |
|
1565 | 1536 | line = infile[cur_index] |
|
1566 | 1537 | sline = line.strip() |
|
1567 | 1538 | # do we have anything on the line ? |
|
1568 | 1539 | if not sline or sline.startswith('#'): |
|
1569 | 1540 | reset_comment = False |
|
1570 | 1541 | comment_list.append(line) |
|
1571 | 1542 | continue |
|
1572 | 1543 | |
|
1573 | 1544 | if not done_start: |
|
1574 | 1545 | # preserve initial comment |
|
1575 | 1546 | self.initial_comment = comment_list |
|
1576 | 1547 | comment_list = [] |
|
1577 | 1548 | done_start = True |
|
1578 | 1549 | |
|
1579 | 1550 | reset_comment = True |
|
1580 | 1551 | # first we check if it's a section marker |
|
1581 | 1552 | mat = self._sectionmarker.match(line) |
|
1582 | 1553 | if mat is not None: |
|
1583 | 1554 | # is a section line |
|
1584 | 1555 | (indent, sect_open, sect_name, sect_close, comment) = mat.groups() |
|
1585 | 1556 | if indent and (self.indent_type is None): |
|
1586 | 1557 | self.indent_type = indent |
|
1587 | 1558 | cur_depth = sect_open.count('[') |
|
1588 | 1559 | if cur_depth != sect_close.count(']'): |
|
1589 | 1560 | self._handle_error("Cannot compute the section depth at line %s.", |
|
1590 | 1561 | NestingError, infile, cur_index) |
|
1591 | 1562 | continue |
|
1592 | 1563 | |
|
1593 | 1564 | if cur_depth < this_section.depth: |
|
1594 | 1565 | # the new section is dropping back to a previous level |
|
1595 | 1566 | try: |
|
1596 | 1567 | parent = self._match_depth(this_section, |
|
1597 | 1568 | cur_depth).parent |
|
1598 | 1569 | except SyntaxError: |
|
1599 | 1570 | self._handle_error("Cannot compute nesting level at line %s.", |
|
1600 | 1571 | NestingError, infile, cur_index) |
|
1601 | 1572 | continue |
|
1602 | 1573 | elif cur_depth == this_section.depth: |
|
1603 | 1574 | # the new section is a sibling of the current section |
|
1604 | 1575 | parent = this_section.parent |
|
1605 | 1576 | elif cur_depth == this_section.depth + 1: |
|
1606 | 1577 | # the new section is a child of the current section |
|
1607 | 1578 | parent = this_section |
|
1608 | 1579 | else: |
|
1609 | 1580 | self._handle_error("Section too nested at line %s.", |
|
1610 | 1581 | NestingError, infile, cur_index) |
|
1611 | 1582 | |
|
1612 | 1583 | sect_name = self._unquote(sect_name) |
|
1613 | 1584 | if parent.has_key(sect_name): |
|
1614 | 1585 | self._handle_error('Duplicate section name at line %s.', |
|
1615 | 1586 | DuplicateError, infile, cur_index) |
|
1616 | 1587 | continue |
|
1617 | 1588 | |
|
1618 | 1589 | # create the new section |
|
1619 | 1590 | this_section = Section( |
|
1620 | 1591 | parent, |
|
1621 | 1592 | cur_depth, |
|
1622 | 1593 | self, |
|
1623 | 1594 | name=sect_name) |
|
1624 | 1595 | parent[sect_name] = this_section |
|
1625 | 1596 | parent.inline_comments[sect_name] = comment |
|
1626 | 1597 | parent.comments[sect_name] = comment_list |
|
1627 | 1598 | continue |
|
1628 | 1599 | # |
|
1629 | 1600 | # it's not a section marker, |
|
1630 | 1601 | # so it should be a valid ``key = value`` line |
|
1631 | 1602 | mat = self._keyword.match(line) |
|
1632 | 1603 | if mat is None: |
|
1633 | 1604 | # it neither matched as a keyword |
|
1634 | 1605 | # nor a section marker |
|
1635 | 1606 | self._handle_error( |
|
1636 | 1607 | 'Invalid line at line "%s".', |
|
1637 | 1608 | ParseError, infile, cur_index) |
|
1638 | 1609 | else: |
|
1639 | 1610 | # is a keyword value |
|
1640 | 1611 | # value will include any inline comment |
|
1641 | 1612 | (indent, key, value) = mat.groups() |
|
1642 | 1613 | if indent and (self.indent_type is None): |
|
1643 | 1614 | self.indent_type = indent |
|
1644 | 1615 | # check for a multiline value |
|
1645 | 1616 | if value[:3] in ['"""', "'''"]: |
|
1646 | 1617 | try: |
|
1647 | 1618 | (value, comment, cur_index) = self._multiline( |
|
1648 | 1619 | value, infile, cur_index, maxline) |
|
1649 | 1620 | except SyntaxError: |
|
1650 | 1621 | self._handle_error( |
|
1651 | 1622 | 'Parse error in value at line %s.', |
|
1652 | 1623 | ParseError, infile, cur_index) |
|
1653 | 1624 | continue |
|
1654 | 1625 | else: |
|
1655 | 1626 | if self.unrepr: |
|
1656 | 1627 | comment = '' |
|
1657 | 1628 | try: |
|
1658 | 1629 | value = unrepr(value) |
|
1659 | 1630 | except Exception, e: |
|
1660 | 1631 | if type(e) == UnknownType: |
|
1661 | 1632 | msg = 'Unknown name or type in value at line %s.' |
|
1662 | 1633 | else: |
|
1663 | 1634 | msg = 'Parse error in value at line %s.' |
|
1664 | 1635 | self._handle_error(msg, UnreprError, infile, |
|
1665 | 1636 | cur_index) |
|
1666 | 1637 | continue |
|
1667 | 1638 | else: |
|
1668 | 1639 | if self.unrepr: |
|
1669 | 1640 | comment = '' |
|
1670 | 1641 | try: |
|
1671 | 1642 | value = unrepr(value) |
|
1672 | 1643 | except Exception, e: |
|
1673 | 1644 | if isinstance(e, UnknownType): |
|
1674 | 1645 | msg = 'Unknown name or type in value at line %s.' |
|
1675 | 1646 | else: |
|
1676 | 1647 | msg = 'Parse error in value at line %s.' |
|
1677 | 1648 | self._handle_error(msg, UnreprError, infile, |
|
1678 | 1649 | cur_index) |
|
1679 | 1650 | continue |
|
1680 | 1651 | else: |
|
1681 | 1652 | # extract comment and lists |
|
1682 | 1653 | try: |
|
1683 | 1654 | (value, comment) = self._handle_value(value) |
|
1684 | 1655 | except SyntaxError: |
|
1685 | 1656 | self._handle_error( |
|
1686 | 1657 | 'Parse error in value at line %s.', |
|
1687 | 1658 | ParseError, infile, cur_index) |
|
1688 | 1659 | continue |
|
1689 | 1660 | # |
|
1690 | 1661 | key = self._unquote(key) |
|
1691 | 1662 | if this_section.has_key(key): |
|
1692 | 1663 | self._handle_error( |
|
1693 | 1664 | 'Duplicate keyword name at line %s.', |
|
1694 | 1665 | DuplicateError, infile, cur_index) |
|
1695 | 1666 | continue |
|
1696 | 1667 | # add the key. |
|
1697 | 1668 | # we set unrepr because if we have got this far we will never |
|
1698 | 1669 | # be creating a new section |
|
1699 | 1670 | this_section.__setitem__(key, value, unrepr=True) |
|
1700 | 1671 | this_section.inline_comments[key] = comment |
|
1701 | 1672 | this_section.comments[key] = comment_list |
|
1702 | 1673 | continue |
|
1703 | 1674 | # |
|
1704 | 1675 | if self.indent_type is None: |
|
1705 | 1676 | # no indentation used, set the type accordingly |
|
1706 | 1677 | self.indent_type = '' |
|
1707 | 1678 | |
|
1708 | 1679 | # preserve the final comment |
|
1709 | 1680 | if not self and not self.initial_comment: |
|
1710 | 1681 | self.initial_comment = comment_list |
|
1711 | 1682 | elif not reset_comment: |
|
1712 | 1683 | self.final_comment = comment_list |
|
1713 | 1684 | self.list_values = temp_list_values |
|
1714 | 1685 | |
|
1715 | 1686 | |
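The loop above classifies every line as a comment, a section marker (nesting depth equals the number of ``[`` brackets) or a ``key = value`` pair, and attaches the comments collected so far to whatever comes next. A small round trip with the ConfigObj class from this module shows the result; the expected values are indicative:

    lines = [
        '# initial comment',
        'key = value    # inline comment',
        '[section]',
        '    [[nested]]',
        '    answer = 42',
    ]
    cfg = ConfigObj(lines)
    # cfg['key'] == 'value'; the inline comment ends up in cfg.inline_comments['key']
    # cfg['section']['nested']['answer'] == '42'   (values stay strings until validated)
    # cfg.initial_comment == ['# initial comment']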
|
1716 | 1687 | def _match_depth(self, sect, depth): |
|
1717 | 1688 | """ |
|
1718 | 1689 | Given a section and a depth level, walk back through the sections |
|
1719 | 1690 | parents to see if the depth level matches a previous section. |
|
1720 | 1691 | |
|
1721 | 1692 | Return a reference to the right section, |
|
1722 | 1693 | or raise a SyntaxError. |
|
1723 | 1694 | """ |
|
1724 | 1695 | while depth < sect.depth: |
|
1725 | 1696 | if sect is sect.parent: |
|
1726 | 1697 | # we've reached the top level already |
|
1727 | 1698 | raise SyntaxError() |
|
1728 | 1699 | sect = sect.parent |
|
1729 | 1700 | if sect.depth == depth: |
|
1730 | 1701 | return sect |
|
1731 | 1702 | # shouldn't get here |
|
1732 | 1703 | raise SyntaxError() |
|
1733 | 1704 | |
|
1734 | 1705 | |
|
1735 | 1706 | def _handle_error(self, text, ErrorClass, infile, cur_index): |
|
1736 | 1707 | """ |
|
1737 | 1708 | Handle an error according to the error settings. |
|
1738 | 1709 | |
|
1739 | 1710 | Either raise the error or store it. |
|
1740 | 1711 | The error will have occurred at ``cur_index`` |
|
1741 | 1712 | """ |
|
1742 | 1713 | line = infile[cur_index] |
|
1743 | 1714 | cur_index += 1 |
|
1744 | 1715 | message = text % cur_index |
|
1745 | 1716 | error = ErrorClass(message, cur_index, line) |
|
1746 | 1717 | if self.raise_errors: |
|
1747 | 1718 | # raise the error - parsing stops here |
|
1748 | 1719 | raise error |
|
1749 | 1720 | # store the error |
|
1750 | 1721 | # reraise when parsing has finished |
|
1751 | 1722 | self._errors.append(error) |
|
1752 | 1723 | |
|
1753 | 1724 | |
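With the default ``raise_errors=False`` the errors stored here are re-raised together once parsing has finished, as a single ``ConfigObjError`` whose ``errors`` attribute lists one entry per bad line; with ``raise_errors=True`` the first bad line aborts parsing immediately. A small sketch of the difference, using deliberately broken input:

    bad_lines = ['key = "unclosed', '[section']
    try:
        ConfigObj(bad_lines)                     # errors are collected, then raised together
    except ConfigObjError, exc:
        pass    # exc.errors should hold one error per offending line, with line numbers
    try:
        ConfigObj(bad_lines, raise_errors=True)  # stops at the first offending line
    except ConfigObjError, exc:
        pass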
|
1754 | 1725 | def _unquote(self, value): |
|
1755 | 1726 | """Return an unquoted version of a value""" |
|
1756 | 1727 | if (value[0] == value[-1]) and (value[0] in ('"', "'")): |
|
1757 | 1728 | value = value[1:-1] |
|
1758 | 1729 | return value |
|
1759 | 1730 | |
|
1760 | 1731 | |
|
1761 | 1732 | def _quote(self, value, multiline=True): |
|
1762 | 1733 | """ |
|
1763 | 1734 | Return a safely quoted version of a value. |
|
1764 | 1735 | |
|
1765 | 1736 | Raise a ConfigObjError if the value cannot be safely quoted. |
|
1766 | 1737 | If multiline is ``True`` (default) then use triple quotes |
|
1767 | 1738 | if necessary. |
|
1768 | 1739 | |
|
1769 | 1740 | Don't quote values that don't need it. |
|
1770 | 1741 | Recursively quote members of a list and return a comma joined list. |
|
1771 | 1742 | Multiline is ``False`` for lists. |
|
1772 | 1743 | Obey list syntax for empty and single member lists. |
|
1773 | 1744 | |
|
1774 | 1745 | If ``list_values=False`` then the value is only quoted if it contains |
|
1775 | 1746 | a ``\n`` (is multiline) or '#'. |
|
1776 | 1747 | |
|
1777 | 1748 | If ``write_empty_values`` is set, and the value is an empty string, it |
|
1778 | 1749 | won't be quoted. |
|
1779 | 1750 | """ |
|
1780 | 1751 | if multiline and self.write_empty_values and value == '': |
|
1781 | 1752 | # Only if multiline is set, so that it is used for values not |
|
1782 | 1753 | # keys, and not values that are part of a list |
|
1783 | 1754 | return '' |
|
1784 | 1755 | |
|
1785 | 1756 | if multiline and isinstance(value, (list, tuple)): |
|
1786 | 1757 | if not value: |
|
1787 | 1758 | return ',' |
|
1788 | 1759 | elif len(value) == 1: |
|
1789 | 1760 | return self._quote(value[0], multiline=False) + ',' |
|
1790 | 1761 | return ', '.join([self._quote(val, multiline=False) |
|
1791 | 1762 | for val in value]) |
|
1792 | 1763 | if not isinstance(value, StringTypes): |
|
1793 | 1764 | if self.stringify: |
|
1794 | 1765 | value = str(value) |
|
1795 | 1766 | else: |
|
1796 | 1767 | raise TypeError('Value "%s" is not a string.' % value) |
|
1797 | 1768 | |
|
1798 | 1769 | if not value: |
|
1799 | 1770 | return '""' |
|
1800 | 1771 | |
|
1801 | 1772 | no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value |
|
1802 | 1773 | need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value )) |
|
1803 | 1774 | hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value) |
|
1804 | 1775 | check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote |
|
1805 | 1776 | |
|
1806 | 1777 | if check_for_single: |
|
1807 | 1778 | if not self.list_values: |
|
1808 | 1779 | # we don't quote if ``list_values=False`` |
|
1809 | 1780 | quot = noquot |
|
1810 | 1781 | # for normal values either single or double quotes will do |
|
1811 | 1782 | elif '\n' in value: |
|
1812 | 1783 | # will only happen if multiline is off - e.g. '\n' in key |
|
1813 | 1784 | raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) |
|
1814 | 1785 | elif ((value[0] not in wspace_plus) and |
|
1815 | 1786 | (value[-1] not in wspace_plus) and |
|
1816 | 1787 | (',' not in value)): |
|
1817 | 1788 | quot = noquot |
|
1818 | 1789 | else: |
|
1819 | 1790 | quot = self._get_single_quote(value) |
|
1820 | 1791 | else: |
|
1821 | 1792 | # if value has '\n' or "'" *and* '"', it will need triple quotes |
|
1822 | 1793 | quot = self._get_triple_quote(value) |
|
1823 | 1794 | |
|
1824 | 1795 | if quot == noquot and '#' in value and self.list_values: |
|
1825 | 1796 | quot = self._get_single_quote(value) |
|
1826 | 1797 | |
|
1827 | 1798 | return quot % value |
|
1828 | 1799 | |
|
1829 | 1800 | |
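To summarise the rules above: plain values are written bare, values containing commas or leading/trailing whitespace get single or double quotes, and values mixing both quote characters or containing newlines fall back to triple quotes. Calling the (private) method on an empty instance illustrates this; the expected results are indicative:

    c = ConfigObj()
    c._quote('simple')         # -> 'simple'    (no quoting needed)
    c._quote('a, b')           # -> '"a, b"'    (the comma would otherwise start a list)
    c._quote('say "hi"')       # -> single-quoted, because the value contains a double quote
    c._quote('two\nlines')     # -> triple-quoted, because of the newline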
|
1830 | 1801 | def _get_single_quote(self, value): |
|
1831 | 1802 | if ("'" in value) and ('"' in value): |
|
1832 | 1803 | raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) |
|
1833 | 1804 | elif '"' in value: |
|
1834 | 1805 | quot = squot |
|
1835 | 1806 | else: |
|
1836 | 1807 | quot = dquot |
|
1837 | 1808 | return quot |
|
1838 | 1809 | |
|
1839 | 1810 | |
|
1840 | 1811 | def _get_triple_quote(self, value): |
|
1841 | 1812 | if (value.find('"""') != -1) and (value.find("'''") != -1): |
|
1842 | 1813 | raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) |
|
1843 | 1814 | if value.find('"""') == -1: |
|
1844 | 1815 | quot = tdquot |
|
1845 | 1816 | else: |
|
1846 | 1817 | quot = tsquot |
|
1847 | 1818 | return quot |
|
1848 | 1819 | |
|
1849 | 1820 | |
|
1850 | 1821 | def _handle_value(self, value): |
|
1851 | 1822 | """ |
|
1852 | 1823 | Given a value string, unquote, remove comment, |
|
1853 | 1824 | handle lists. (including empty and single member lists) |
|
1854 | 1825 | """ |
|
1855 | 1826 | # do we look for lists in values ? |
|
1856 | 1827 | if not self.list_values: |
|
1857 | 1828 | mat = self._nolistvalue.match(value) |
|
1858 | 1829 | if mat is None: |
|
1859 | 1830 | raise SyntaxError() |
|
1860 | 1831 | # NOTE: we don't unquote here |
|
1861 | 1832 | return mat.groups() |
|
1862 | 1833 | # |
|
1863 | 1834 | mat = self._valueexp.match(value) |
|
1864 | 1835 | if mat is None: |
|
1865 | 1836 | # the value is badly constructed, probably badly quoted, |
|
1866 | 1837 | # or an invalid list |
|
1867 | 1838 | raise SyntaxError() |
|
1868 | 1839 | (list_values, single, empty_list, comment) = mat.groups() |
|
1869 | 1840 | if (list_values == '') and (single is None): |
|
1870 | 1841 | # change this if you want to accept empty values |
|
1871 | 1842 | raise SyntaxError() |
|
1872 | 1843 | # NOTE: note there is no error handling from here if the regex |
|
1873 | 1844 | # is wrong: then incorrect values will slip through |
|
1874 | 1845 | if empty_list is not None: |
|
1875 | 1846 | # the single comma - meaning an empty list |
|
1876 | 1847 | return ([], comment) |
|
1877 | 1848 | if single is not None: |
|
1878 | 1849 | # handle empty values |
|
1879 | 1850 | if list_values and not single: |
|
1880 | 1851 | # FIXME: the '' is a workaround because our regex now matches |
|
1881 | 1852 | # '' at the end of a list if it has a trailing comma |
|
1882 | 1853 | single = None |
|
1883 | 1854 | else: |
|
1884 | 1855 | single = single or '""' |
|
1885 | 1856 | single = self._unquote(single) |
|
1886 | 1857 | if list_values == '': |
|
1887 | 1858 | # not a list value |
|
1888 | 1859 | return (single, comment) |
|
1889 | 1860 | the_list = self._listvalueexp.findall(list_values) |
|
1890 | 1861 | the_list = [self._unquote(val) for val in the_list] |
|
1891 | 1862 | if single is not None: |
|
1892 | 1863 | the_list += [single] |
|
1893 | 1864 | return (the_list, comment) |
|
1894 | 1865 | |
|
1895 | 1866 | |
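In list mode a lone comma therefore means an empty list, a trailing comma a single-member list, and comma-separated items a list of strings, with any inline comment split off at the same time. For example:

    cfg = ConfigObj(['a = 1, 2, 3', 'b = fish', 'c = ,', 'd = "one",'])
    # cfg['a'] == ['1', '2', '3']
    # cfg['b'] == 'fish'
    # cfg['c'] == []          (the single comma: an empty list)
    # cfg['d'] == ['one']     (trailing comma: a one-element list)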
|
1896 | 1867 | def _multiline(self, value, infile, cur_index, maxline): |
|
1897 | 1868 | """Extract the value, where we are in a multiline situation.""" |
|
1898 | 1869 | quot = value[:3] |
|
1899 | 1870 | newvalue = value[3:] |
|
1900 | 1871 | single_line = self._triple_quote[quot][0] |
|
1901 | 1872 | multi_line = self._triple_quote[quot][1] |
|
1902 | 1873 | mat = single_line.match(value) |
|
1903 | 1874 | if mat is not None: |
|
1904 | 1875 | retval = list(mat.groups()) |
|
1905 | 1876 | retval.append(cur_index) |
|
1906 | 1877 | return retval |
|
1907 | 1878 | elif newvalue.find(quot) != -1: |
|
1908 | 1879 | # somehow the triple quote is missing |
|
1909 | 1880 | raise SyntaxError() |
|
1910 | 1881 | # |
|
1911 | 1882 | while cur_index < maxline: |
|
1912 | 1883 | cur_index += 1 |
|
1913 | 1884 | newvalue += '\n' |
|
1914 | 1885 | line = infile[cur_index] |
|
1915 | 1886 | if line.find(quot) == -1: |
|
1916 | 1887 | newvalue += line |
|
1917 | 1888 | else: |
|
1918 | 1889 | # end of multiline, process it |
|
1919 | 1890 | break |
|
1920 | 1891 | else: |
|
1921 | 1892 | # we've got to the end of the config, oops... |
|
1922 | 1893 | raise SyntaxError() |
|
1923 | 1894 | mat = multi_line.match(line) |
|
1924 | 1895 | if mat is None: |
|
1925 | 1896 | # a badly formed line |
|
1926 | 1897 | raise SyntaxError() |
|
1927 | 1898 | (value, comment) = mat.groups() |
|
1928 | 1899 | return (newvalue + value, comment, cur_index) |
|
1929 | 1900 | |
|
1930 | 1901 | |
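So a value that opens with a triple quote keeps absorbing raw lines, newlines included, until a line containing the matching triple quote is reached. For instance (expected value shown as a comment):

    cfg = ConfigObj(["text = '''first", "second'''"])
    # cfg['text'] == 'first\nsecond'   (the intermediate newline is preserved)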
|
1931 | 1902 | def _handle_configspec(self, configspec): |
|
1932 | 1903 | """Parse the configspec.""" |
|
1933 | 1904 | # FIXME: Should we check that the configspec was created with the |
|
1934 | 1905 | # correct settings ? (i.e. ``list_values=False``) |
|
1935 | 1906 | if not isinstance(configspec, ConfigObj): |
|
1936 | 1907 | try: |
|
1937 | 1908 | configspec = ConfigObj(configspec, |
|
1938 | 1909 | raise_errors=True, |
|
1939 | 1910 | file_error=True, |
|
1940 | 1911 | list_values=False) |
|
1941 | 1912 | except ConfigObjError, e: |
|
1942 | 1913 | # FIXME: Should these errors have a reference |
|
1943 | 1914 | # to the already parsed ConfigObj ? |
|
1944 | 1915 | raise ConfigspecError('Parsing configspec failed: %s' % e) |
|
1945 | 1916 | except IOError, e: |
|
1946 | 1917 | raise IOError('Reading configspec failed: %s' % e) |
|
1947 | 1918 | |
|
1948 | 1919 | self._set_configspec_value(configspec, self) |
|
1949 | 1920 | |
|
1950 | 1921 | |
|
1951 | 1922 | def _set_configspec_value(self, configspec, section): |
|
1952 | 1923 | """Used to recursively set configspec values.""" |
|
1953 | 1924 | if '__many__' in configspec.sections: |
|
1954 | 1925 | section.configspec['__many__'] = configspec['__many__'] |
|
1955 | 1926 | if len(configspec.sections) > 1: |
|
1956 | 1927 | # FIXME: can we supply any useful information here ? |
|
1957 | 1928 | raise RepeatSectionError() |
|
1958 | 1929 | |
|
1959 | 1930 | if hasattr(configspec, 'initial_comment'): |
|
1960 | 1931 | section._configspec_initial_comment = configspec.initial_comment |
|
1961 | 1932 | section._configspec_final_comment = configspec.final_comment |
|
1962 | 1933 | section._configspec_encoding = configspec.encoding |
|
1963 | 1934 | section._configspec_BOM = configspec.BOM |
|
1964 | 1935 | section._configspec_newlines = configspec.newlines |
|
1965 | 1936 | section._configspec_indent_type = configspec.indent_type |
|
1966 | 1937 | |
|
1967 | 1938 | for entry in configspec.scalars: |
|
1968 | 1939 | section._configspec_comments[entry] = configspec.comments[entry] |
|
1969 | 1940 | section._configspec_inline_comments[entry] = configspec.inline_comments[entry] |
|
1970 | 1941 | section.configspec[entry] = configspec[entry] |
|
1971 | 1942 | section._order.append(entry) |
|
1972 | 1943 | |
|
1973 | 1944 | for entry in configspec.sections: |
|
1974 | 1945 | if entry == '__many__': |
|
1975 | 1946 | continue |
|
1976 | 1947 | |
|
1977 | 1948 | section._cs_section_comments[entry] = configspec.comments[entry] |
|
1978 | 1949 | section._cs_section_inline_comments[entry] = configspec.inline_comments[entry] |
|
1979 | 1950 | if not section.has_key(entry): |
|
1980 | 1951 | section[entry] = {} |
|
1981 | 1952 | self._set_configspec_value(configspec[entry], section[entry]) |
|
1982 | 1953 | |
|
1983 | 1954 | |
|
1984 | 1955 | def _handle_repeat(self, section, configspec): |
|
1985 | 1956 | """Dynamically assign configspec for repeated section.""" |
|
1986 | 1957 | try: |
|
1987 | 1958 | section_keys = configspec.sections |
|
1988 | 1959 | scalar_keys = configspec.scalars |
|
1989 | 1960 | except AttributeError: |
|
1990 | 1961 | section_keys = [entry for entry in configspec |
|
1991 | 1962 | if isinstance(configspec[entry], dict)] |
|
1992 | 1963 | scalar_keys = [entry for entry in configspec |
|
1993 | 1964 | if not isinstance(configspec[entry], dict)] |
|
1994 | 1965 | |
|
1995 | 1966 | if '__many__' in section_keys and len(section_keys) > 1: |
|
1996 | 1967 | # FIXME: can we supply any useful information here ? |
|
1997 | 1968 | raise RepeatSectionError() |
|
1998 | 1969 | |
|
1999 | 1970 | scalars = {} |
|
2000 | 1971 | sections = {} |
|
2001 | 1972 | for entry in scalar_keys: |
|
2002 | 1973 | val = configspec[entry] |
|
2003 | 1974 | scalars[entry] = val |
|
2004 | 1975 | for entry in section_keys: |
|
2005 | 1976 | val = configspec[entry] |
|
2006 | 1977 | if entry == '__many__': |
|
2007 | 1978 | scalars[entry] = val |
|
2008 | 1979 | continue |
|
2009 | 1980 | sections[entry] = val |
|
2010 | 1981 | |
|
2011 | 1982 | section.configspec = scalars |
|
2012 | 1983 | for entry in sections: |
|
2013 | 1984 | if not section.has_key(entry): |
|
2014 | 1985 | section[entry] = {} |
|
2015 | 1986 | self._handle_repeat(section[entry], sections[entry]) |
|
2016 | 1987 | |
|
2017 | 1988 | |
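This is what makes a ``__many__`` entry in a configspec apply to every sibling subsection at validation time. A typical repeated-section spec looks like the sketch below; actually checking it needs the separate *validate* module:

    spec = ['[servers]',
            '    [[__many__]]',
            '    host = string',
            '    port = integer(1, 65535)']
    cfg = ConfigObj(['[servers]',
                     '    [[web]]',
                     '    host = example.org',
                     '    port = 80'],
                    configspec=spec)
    # cfg.validate(validate.Validator()) would then check and convert host/port
    # in every subsection of [servers], however many of them there are.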
|
2018 | 1989 | def _write_line(self, indent_string, entry, this_entry, comment): |
|
2019 | 1990 | """Write an individual line, for the write method""" |
|
2020 | 1991 | # NOTE: the calls to self._quote here handles non-StringType values. |
|
2021 | 1992 | if not self.unrepr: |
|
2022 | 1993 | val = self._decode_element(self._quote(this_entry)) |
|
2023 | 1994 | else: |
|
2024 | 1995 | val = repr(this_entry) |
|
2025 | 1996 | return '%s%s%s%s%s' % (indent_string, |
|
2026 | 1997 | self._decode_element(self._quote(entry, multiline=False)), |
|
2027 | 1998 | self._a_to_u(' = '), |
|
2028 | 1999 | val, |
|
2029 | 2000 | self._decode_element(comment)) |
|
2030 | 2001 | |
|
2031 | 2002 | |
|
2032 | 2003 | def _write_marker(self, indent_string, depth, entry, comment): |
|
2033 | 2004 | """Write a section marker line""" |
|
2034 | 2005 | return '%s%s%s%s%s' % (indent_string, |
|
2035 | 2006 | self._a_to_u('[' * depth), |
|
2036 | 2007 | self._quote(self._decode_element(entry), multiline=False), |
|
2037 | 2008 | self._a_to_u(']' * depth), |
|
2038 | 2009 | self._decode_element(comment)) |
|
2039 | 2010 | |
|
2040 | 2011 | |
|
2041 | 2012 | def _handle_comment(self, comment): |
|
2042 | 2013 | """Deal with a comment.""" |
|
2043 | 2014 | if not comment: |
|
2044 | 2015 | return '' |
|
2045 | 2016 | start = self.indent_type |
|
2046 | 2017 | if not comment.startswith('#'): |
|
2047 | 2018 | start += self._a_to_u(' # ') |
|
2048 | 2019 | return (start + comment) |
|
2049 | 2020 | |
|
2050 | 2021 | |
|
2051 | 2022 | # Public methods |
|
2052 | 2023 | |
|
2053 | 2024 | def write(self, outfile=None, section=None): |
|
2054 | 2025 | """ |
|
2055 | 2026 | Write the current ConfigObj to a file |
|
2056 | 2027 | |
|
2057 | 2028 | tekNico: FIXME: use StringIO instead of real files |
|
2058 | 2029 | |
|
2059 | 2030 | >>> filename = a.filename |
|
2060 | 2031 | >>> a.filename = 'test.ini' |
|
2061 | 2032 | >>> a.write() |
|
2062 | 2033 | >>> a.filename = filename |
|
2063 | 2034 | >>> a == ConfigObj('test.ini', raise_errors=True) |
|
2064 | 2035 | 1 |
|
2065 | 2036 | """ |
|
2066 | 2037 | if self.indent_type is None: |
|
2067 | 2038 | # this can be true if initialised from a dictionary |
|
2068 | 2039 | self.indent_type = DEFAULT_INDENT_TYPE |
|
2069 | 2040 | |
|
2070 | 2041 | out = [] |
|
2071 | 2042 | cs = self._a_to_u('#') |
|
2072 | 2043 | csp = self._a_to_u('# ') |
|
2073 | 2044 | if section is None: |
|
2074 | 2045 | int_val = self.interpolation |
|
2075 | 2046 | self.interpolation = False |
|
2076 | 2047 | section = self |
|
2077 | 2048 | for line in self.initial_comment: |
|
2078 | 2049 | line = self._decode_element(line) |
|
2079 | 2050 | stripped_line = line.strip() |
|
2080 | 2051 | if stripped_line and not stripped_line.startswith(cs): |
|
2081 | 2052 | line = csp + line |
|
2082 | 2053 | out.append(line) |
|
2083 | 2054 | |
|
2084 | 2055 | indent_string = self.indent_type * section.depth |
|
2085 | 2056 | for entry in (section.scalars + section.sections): |
|
2086 | 2057 | if entry in section.defaults: |
|
2087 | 2058 | # don't write out default values |
|
2088 | 2059 | continue |
|
2089 | 2060 | for comment_line in section.comments[entry]: |
|
2090 | 2061 | comment_line = self._decode_element(comment_line.lstrip()) |
|
2091 | 2062 | if comment_line and not comment_line.startswith(cs): |
|
2092 | 2063 | comment_line = csp + comment_line |
|
2093 | 2064 | out.append(indent_string + comment_line) |
|
2094 | 2065 | this_entry = section[entry] |
|
2095 | 2066 | comment = self._handle_comment(section.inline_comments[entry]) |
|
2096 | 2067 | |
|
2097 | 2068 | if isinstance(this_entry, dict): |
|
2098 | 2069 | # a section |
|
2099 | 2070 | out.append(self._write_marker( |
|
2100 | 2071 | indent_string, |
|
2101 | 2072 | this_entry.depth, |
|
2102 | 2073 | entry, |
|
2103 | 2074 | comment)) |
|
2104 | 2075 | out.extend(self.write(section=this_entry)) |
|
2105 | 2076 | else: |
|
2106 | 2077 | out.append(self._write_line( |
|
2107 | 2078 | indent_string, |
|
2108 | 2079 | entry, |
|
2109 | 2080 | this_entry, |
|
2110 | 2081 | comment)) |
|
2111 | 2082 | |
|
2112 | 2083 | if section is self: |
|
2113 | 2084 | for line in self.final_comment: |
|
2114 | 2085 | line = self._decode_element(line) |
|
2115 | 2086 | stripped_line = line.strip() |
|
2116 | 2087 | if stripped_line and not stripped_line.startswith(cs): |
|
2117 | 2088 | line = csp + line |
|
2118 | 2089 | out.append(line) |
|
2119 | 2090 | self.interpolation = int_val |
|
2120 | 2091 | |
|
2121 | 2092 | if section is not self: |
|
2122 | 2093 | return out |
|
2123 | 2094 | |
|
2124 | 2095 | if (self.filename is None) and (outfile is None): |
|
2125 | 2096 | # output a list of lines |
|
2126 | 2097 | # might need to encode |
|
2127 | 2098 | # NOTE: This will *screw* UTF16, each line will start with the BOM |
|
2128 | 2099 | if self.encoding: |
|
2129 | 2100 | out = [l.encode(self.encoding) for l in out] |
|
2130 | 2101 | if (self.BOM and ((self.encoding is None) or |
|
2131 | 2102 | (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))): |
|
2132 | 2103 | # Add the UTF8 BOM |
|
2133 | 2104 | if not out: |
|
2134 | 2105 | out.append('') |
|
2135 | 2106 | out[0] = BOM_UTF8 + out[0] |
|
2136 | 2107 | return out |
|
2137 | 2108 | |
|
2138 | 2109 | # Turn the list to a string, joined with correct newlines |
|
2139 | 2110 | newline = self.newlines or os.linesep |
|
2140 | 2111 | output = self._a_to_u(newline).join(out) |
|
2141 | 2112 | if self.encoding: |
|
2142 | 2113 | output = output.encode(self.encoding) |
|
2143 | 2114 | if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)): |
|
2144 | 2115 | # Add the UTF8 BOM |
|
2145 | 2116 | output = BOM_UTF8 + output |
|
2146 | 2117 | |
|
2147 | 2118 | if not output.endswith(newline): |
|
2148 | 2119 | output += newline |
|
2149 | 2120 | if outfile is not None: |
|
2150 | 2121 | outfile.write(output) |
|
2151 | 2122 | else: |
|
2152 | 2123 | h = open(self.filename, 'wb') |
|
2153 | 2124 | h.write(output) |
|
2154 | 2125 | h.close() |
|
2155 | 2126 | |
|
2156 | 2127 | |
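When there is no ``self.filename`` and no ``outfile``, ``write`` returns the rendered lines instead of touching the filesystem, which is convenient in tests; any object with a ``write`` method can also be passed as ``outfile``. Roughly (expected output shown as a comment):

    from StringIO import StringIO
    cfg = ConfigObj()
    cfg['name'] = 'value'
    cfg['section'] = {'sub': '1'}
    cfg.write()       # -> ['name = value', '[section]', '    sub = 1']
    buf = StringIO()
    cfg.write(buf)    # same text, joined with newlines and written to buf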
|
2157 | 2128 | def validate(self, validator, preserve_errors=False, copy=False, |
|
2158 | 2129 | section=None): |
|
2159 | 2130 | """ |
|
2160 | 2131 | Test the ConfigObj against a configspec. |
|
2161 | 2132 | |
|
2162 | 2133 | It uses the ``validator`` object from *validate.py*. |
|
2163 | 2134 | |
|
2164 | 2135 | To run ``validate`` on the current ConfigObj, call: :: |
|
2165 | 2136 | |
|
2166 | 2137 | test = config.validate(validator) |
|
2167 | 2138 | |
|
2168 | 2139 | (Normally having previously passed in the configspec when the ConfigObj |
|
2169 | 2140 | was created - you can dynamically assign a dictionary of checks to the |
|
2170 | 2141 | ``configspec`` attribute of a section though). |
|
2171 | 2142 | |
|
2172 | 2143 | It returns ``True`` if everything passes, or a dictionary of |
|
2173 | 2144 | pass/fails (True/False). If every member of a subsection passes, it |
|
2174 | 2145 | will just have the value ``True``. (It also returns ``False`` if all |
|
2175 | 2146 | members fail). |
|
2176 | 2147 | |
|
2177 | 2148 | In addition, it converts the values from strings to their native |
|
2178 | 2149 | types if their checks pass (and ``stringify`` is set). |
|
2179 | 2150 | |
|
2180 | 2151 | If ``preserve_errors`` is ``True`` (``False`` is default) then instead |
|
2181 | 2152 | of marking a fail with a ``False``, it will preserve the actual |
|
2182 | 2153 | exception object. This can contain info about the reason for failure. |
|
2183 | 2154 | For example the ``VdtValueTooSmallError`` indicates that the value |
|
2184 | 2155 | supplied was too small. If a value (or section) is missing it will |
|
2185 | 2156 | still be marked as ``False``. |
|
2186 | 2157 | |
|
2187 | 2158 | You must have the validate module to use ``preserve_errors=True``. |
|
2188 | 2159 | |
|
2189 | 2160 | You can then use the ``flatten_errors`` function to turn your nested |
|
2190 | 2161 | results dictionary into a flattened list of failures - useful for |
|
2191 | 2162 | displaying meaningful error messages. |
|
2192 | 2163 | """ |
|
2193 | 2164 | if section is None: |
|
2194 | 2165 | if self.configspec is None: |
|
2195 | 2166 | raise ValueError('No configspec supplied.') |
|
2196 | 2167 | if preserve_errors: |
|
2197 | 2168 | # We do this once to remove a top level dependency on the validate module |
|
2198 | 2169 | # Which makes importing configobj faster |
|
2199 | 2170 | from validate import VdtMissingValue |
|
2200 | 2171 | self._vdtMissingValue = VdtMissingValue |
|
2201 | 2172 | section = self |
|
2202 | 2173 | # |
|
2203 | 2174 | spec_section = section.configspec |
|
2204 | 2175 | if copy and hasattr(section, '_configspec_initial_comment'): |
|
2205 | 2176 | section.initial_comment = section._configspec_initial_comment |
|
2206 | 2177 | section.final_comment = section._configspec_final_comment |
|
2207 | 2178 | section.encoding = section._configspec_encoding |
|
2208 | 2179 | section.BOM = section._configspec_BOM |
|
2209 | 2180 | section.newlines = section._configspec_newlines |
|
2210 | 2181 | section.indent_type = section._configspec_indent_type |
|
2211 | 2182 | |
|
2212 | 2183 | if '__many__' in section.configspec: |
|
2213 | 2184 | many = spec_section['__many__'] |
|
2214 | 2185 | # dynamically assign the configspecs |
|
2215 | 2186 | # for the sections below |
|
2216 | 2187 | for entry in section.sections: |
|
2217 | 2188 | self._handle_repeat(section[entry], many) |
|
2218 | 2189 | # |
|
2219 | 2190 | out = {} |
|
2220 | 2191 | ret_true = True |
|
2221 | 2192 | ret_false = True |
|
2222 | 2193 | order = [k for k in section._order if k in spec_section] |
|
2223 | 2194 | order += [k for k in spec_section if k not in order] |
|
2224 | 2195 | for entry in order: |
|
2225 | 2196 | if entry == '__many__': |
|
2226 | 2197 | continue |
|
2227 | 2198 | if (not entry in section.scalars) or (entry in section.defaults): |
|
2228 | 2199 | # missing entries |
|
2229 | 2200 | # or entries from defaults |
|
2230 | 2201 | missing = True |
|
2231 | 2202 | val = None |
|
2232 | 2203 | if copy and not entry in section.scalars: |
|
2233 | 2204 | # copy comments |
|
2234 | 2205 | section.comments[entry] = ( |
|
2235 | 2206 | section._configspec_comments.get(entry, [])) |
|
2236 | 2207 | section.inline_comments[entry] = ( |
|
2237 | 2208 | section._configspec_inline_comments.get(entry, '')) |
|
2238 | 2209 | # |
|
2239 | 2210 | else: |
|
2240 | 2211 | missing = False |
|
2241 | 2212 | val = section[entry] |
|
2242 | 2213 | try: |
|
2243 | 2214 | check = validator.check(spec_section[entry], |
|
2244 | 2215 | val, |
|
2245 | 2216 | missing=missing |
|
2246 | 2217 | ) |
|
2247 | 2218 | except validator.baseErrorClass, e: |
|
2248 | 2219 | if not preserve_errors or isinstance(e, self._vdtMissingValue): |
|
2249 | 2220 | out[entry] = False |
|
2250 | 2221 | else: |
|
2251 | 2222 | # preserve the error |
|
2252 | 2223 | out[entry] = e |
|
2253 | 2224 | ret_false = False |
|
2254 | 2225 | ret_true = False |
|
2255 | 2226 | else: |
|
2256 | 2227 | try: |
|
2257 | 2228 | section.default_values.pop(entry, None) |
|
2258 | 2229 | except AttributeError: |
|
2259 | 2230 | # For Python 2.2 compatibility |
|
2260 | 2231 | try: |
|
2261 | 2232 | del section.default_values[entry] |
|
2262 | 2233 | except KeyError: |
|
2263 | 2234 | pass |
|
2264 | 2235 | |
|
2265 | 2236 | if hasattr(validator, 'get_default_value'): |
|
2266 | 2237 | try: |
|
2267 | 2238 | section.default_values[entry] = validator.get_default_value(spec_section[entry]) |
|
2268 | 2239 | except KeyError: |
|
2269 | 2240 | # No default |
|
2270 | 2241 | pass |
|
2271 | 2242 | |
|
2272 | 2243 | ret_false = False |
|
2273 | 2244 | out[entry] = True |
|
2274 | 2245 | if self.stringify or missing: |
|
2275 | 2246 | # if we are doing type conversion |
|
2276 | 2247 | # or the value is a supplied default |
|
2277 | 2248 | if not self.stringify: |
|
2278 | 2249 | if isinstance(check, (list, tuple)): |
|
2279 | 2250 | # preserve lists |
|
2280 | 2251 | check = [self._str(item) for item in check] |
|
2281 | 2252 | elif missing and check is None: |
|
2282 | 2253 | # convert the None from a default to a '' |
|
2283 | 2254 | check = '' |
|
2284 | 2255 | else: |
|
2285 | 2256 | check = self._str(check) |
|
2286 | 2257 | if (check != val) or missing: |
|
2287 | 2258 | section[entry] = check |
|
2288 | 2259 | if not copy and missing and entry not in section.defaults: |
|
2289 | 2260 | section.defaults.append(entry) |
|
2290 | 2261 | # Missing sections will have been created as empty ones when the |
|
2291 | 2262 | # configspec was read. |
|
2292 | 2263 | for entry in section.sections: |
|
2293 | 2264 | # FIXME: this means DEFAULT is not copied in copy mode |
|
2294 | 2265 | if section is self and entry == 'DEFAULT': |
|
2295 | 2266 | continue |
|
2296 | 2267 | if copy: |
|
2297 | 2268 | section.comments[entry] = section._cs_section_comments[entry] |
|
2298 | 2269 | section.inline_comments[entry] = ( |
|
2299 | 2270 | section._cs_section_inline_comments[entry]) |
|
2300 | 2271 | check = self.validate(validator, preserve_errors=preserve_errors, |
|
2301 | 2272 | copy=copy, section=section[entry]) |
|
2302 | 2273 | out[entry] = check |
|
2303 | 2274 | if check == False: |
|
2304 | 2275 | ret_true = False |
|
2305 | 2276 | elif check == True: |
|
2306 | 2277 | ret_false = False |
|
2307 | 2278 | else: |
|
2308 | 2279 | ret_true = False |
|
2309 | 2280 | ret_false = False |
|
2310 | 2281 | # |
|
2311 | 2282 | if ret_true: |
|
2312 | 2283 | return True |
|
2313 | 2284 | elif ret_false: |
|
2314 | 2285 | return False |
|
2315 | 2286 | return out |
|
2316 | 2287 | |
|
2317 | 2288 | |
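A compact end-to-end use of ``validate`` (this needs the separate *validate* module, as noted above); the expected outcome is indicative:

    from validate import Validator
    spec = ['port = integer(min=1, max=65535, default=8080)',
            'debug = boolean(default=False)']
    cfg = ConfigObj(['port = 80'], configspec=spec)
    ok = cfg.validate(Validator(), copy=True)
    # ok is True; cfg['port'] is now the integer 80, and cfg['debug'] has been
    # filled in from its default because copy=True writes defaults out.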
|
2318 | 2289 | def reset(self): |
|
2319 | 2290 | """Clear ConfigObj instance and restore to 'freshly created' state.""" |
|
2320 | 2291 | self.clear() |
|
2321 | 2292 | self._initialise() |
|
2322 | 2293 | # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload) |
|
2323 | 2294 | # requires an empty dictionary |
|
2324 | 2295 | self.configspec = None |
|
2325 | 2296 | # Just to be sure ;-) |
|
2326 | 2297 | self._original_configspec = None |
|
2327 | 2298 | |
|
2328 | 2299 | |
|
2329 | 2300 | def reload(self): |
|
2330 | 2301 | """ |
|
2331 | 2302 | Reload a ConfigObj from file. |
|
2332 | 2303 | |
|
2333 | 2304 | This method raises a ``ReloadError`` if the ConfigObj doesn't have |
|
2334 | 2305 | a filename attribute pointing to a file. |
|
2335 | 2306 | """ |
|
2336 | 2307 | if not isinstance(self.filename, StringTypes): |
|
2337 | 2308 | raise ReloadError() |
|
2338 | 2309 | |
|
2339 | 2310 | filename = self.filename |
|
2340 | 2311 | current_options = {} |
|
2341 | 2312 | for entry in OPTION_DEFAULTS: |
|
2342 | 2313 | if entry == 'configspec': |
|
2343 | 2314 | continue |
|
2344 | 2315 | current_options[entry] = getattr(self, entry) |
|
2345 | 2316 | |
|
2346 | 2317 | configspec = self._original_configspec |
|
2347 | 2318 | current_options['configspec'] = configspec |
|
2348 | 2319 | |
|
2349 | 2320 | self.clear() |
|
2350 | 2321 | self._initialise(current_options) |
|
2351 | 2322 | self._load(filename, configspec) |
|
2352 | 2323 | |
|
2353 | 2324 | |
|
2354 | 2325 | |
|
2355 | 2326 | class SimpleVal(object): |
|
2356 | 2327 | """ |
|
2357 | 2328 | A simple validator. |
|
2358 | 2329 | Can be used to check that all members expected are present. |
|
2359 | 2330 | |
|
2360 | 2331 | To use it, provide a configspec with all your members in (the value given |
|
2361 | 2332 | will be ignored). Pass an instance of ``SimpleVal`` to the ``validate`` |
|
2362 | 2333 | method of your ``ConfigObj``. ``validate`` will return ``True`` if all |
|
2363 | 2334 | members are present, or a dictionary with True/False meaning |
|
2364 | 2335 | present/missing. (Whole missing sections will be replaced with ``False``) |
|
2365 | 2336 | """ |
|
2366 | 2337 | |
|
2367 | 2338 | def __init__(self): |
|
2368 | 2339 | self.baseErrorClass = ConfigObjError |
|
2369 | 2340 | |
|
2370 | 2341 | def check(self, check, member, missing=False): |
|
2371 | 2342 | """A dummy check method, always returns the value unchanged.""" |
|
2372 | 2343 | if missing: |
|
2373 | 2344 | raise self.baseErrorClass() |
|
2374 | 2345 | return member |
|
2375 | 2346 | |
|
2376 | 2347 | |
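For example, checking only that all expected keys are present:

    spec = ['name = anything', 'port = anything']    # the values are ignored by SimpleVal
    cfg = ConfigObj(['name = box'], configspec=spec)
    result = cfg.validate(SimpleVal())
    # result == {'name': True, 'port': False}   (only 'port' is missing)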
|
2377 | 2348 | # Check / processing functions for options |
|
2378 | 2349 | def flatten_errors(cfg, res, levels=None, results=None): |
|
2379 | 2350 | """ |
|
2380 | 2351 | An example function that will turn a nested dictionary of results |
|
2381 | 2352 | (as returned by ``ConfigObj.validate``) into a flat list. |
|
2382 | 2353 | |
|
2383 | 2354 | ``cfg`` is the ConfigObj instance being checked, ``res`` is the results |
|
2384 | 2355 | dictionary returned by ``validate``. |
|
2385 | 2356 | |
|
2386 | 2357 | (This is a recursive function, so you shouldn't use the ``levels`` or |
|
2387 | 2358 | ``results`` arguments - they are used by the function.) |
|
2388 | 2359 | |
|
2389 | 2360 | Returns a list of keys that failed. Each member of the list is a tuple : |
|
2390 | 2361 | :: |
|
2391 | 2362 | |
|
2392 | 2363 | ([list of sections...], key, result) |
|
2393 | 2364 | |
|
2394 | 2365 | If ``validate`` was called with ``preserve_errors=False`` (the default) |
|
2395 | 2366 | then ``result`` will always be ``False``. |
|
2396 | 2367 | |
|
2397 | 2368 | *list of sections* is a flattened list of sections that the key was found |
|
2398 | 2369 | in. |
|
2399 | 2370 | |
|
2400 | 2371 | If the section was missing then key will be ``None``. |
|
2401 | 2372 | |
|
2402 | 2373 | If the value (or section) was missing then ``result`` will be ``False``. |
|
2403 | 2374 | |
|
2404 | 2375 | If ``validate`` was called with ``preserve_errors=True`` and a value |
|
2405 | 2376 | was present, but failed the check, then ``result`` will be the exception |
|
2406 | 2377 | object returned. You can use this as a string that describes the failure. |
|
2407 | 2378 | |
|
2408 | 2379 | For example *The value "3" is of the wrong type*. |
|
2409 | 2380 | |
|
2410 | 2381 | >>> import validate |
|
2411 | 2382 | >>> vtor = validate.Validator() |
|
2412 | 2383 | >>> my_ini = ''' |
|
2413 | 2384 | ... option1 = True |
|
2414 | 2385 | ... [section1] |
|
2415 | 2386 | ... option1 = True |
|
2416 | 2387 | ... [section2] |
|
2417 | 2388 | ... another_option = Probably |
|
2418 | 2389 | ... [section3] |
|
2419 | 2390 | ... another_option = True |
|
2420 | 2391 | ... [[section3b]] |
|
2421 | 2392 | ... value = 3 |
|
2422 | 2393 | ... value2 = a |
|
2423 | 2394 | ... value3 = 11 |
|
2424 | 2395 | ... ''' |
|
2425 | 2396 | >>> my_cfg = ''' |
|
2426 | 2397 | ... option1 = boolean() |
|
2427 | 2398 | ... option2 = boolean() |
|
2428 | 2399 | ... option3 = boolean(default=Bad_value) |
|
2429 | 2400 | ... [section1] |
|
2430 | 2401 | ... option1 = boolean() |
|
2431 | 2402 | ... option2 = boolean() |
|
2432 | 2403 | ... option3 = boolean(default=Bad_value) |
|
2433 | 2404 | ... [section2] |
|
2434 | 2405 | ... another_option = boolean() |
|
2435 | 2406 | ... [section3] |
|
2436 | 2407 | ... another_option = boolean() |
|
2437 | 2408 | ... [[section3b]] |
|
2438 | 2409 | ... value = integer |
|
2439 | 2410 | ... value2 = integer |
|
2440 | 2411 | ... value3 = integer(0, 10) |
|
2441 | 2412 | ... [[[section3b-sub]]] |
|
2442 | 2413 | ... value = string |
|
2443 | 2414 | ... [section4] |
|
2444 | 2415 | ... another_option = boolean() |
|
2445 | 2416 | ... ''' |
|
2446 | 2417 | >>> cs = my_cfg.split('\\n') |
|
2447 | 2418 | >>> ini = my_ini.split('\\n') |
|
2448 | 2419 | >>> cfg = ConfigObj(ini, configspec=cs) |
|
2449 | 2420 | >>> res = cfg.validate(vtor, preserve_errors=True) |
|
2450 | 2421 | >>> errors = [] |
|
2451 | 2422 | >>> for entry in flatten_errors(cfg, res): |
|
2452 | 2423 | ... section_list, key, error = entry |
|
2453 | 2424 | ... section_list.insert(0, '[root]') |
|
2454 | 2425 | ... if key is not None: |
|
2455 | 2426 | ... section_list.append(key) |
|
2456 | 2427 | ... else: |
|
2457 | 2428 | ... section_list.append('[missing]') |
|
2458 | 2429 | ... section_string = ', '.join(section_list) |
|
2459 | 2430 | ... errors.append((section_string, ' = ', error)) |
|
2460 | 2431 | >>> errors.sort() |
|
2461 | 2432 | >>> for entry in errors: |
|
2462 | 2433 | ... print entry[0], entry[1], (entry[2] or 0) |
|
2463 | 2434 | [root], option2 = 0 |
|
2464 | 2435 | [root], option3 = the value "Bad_value" is of the wrong type. |
|
2465 | 2436 | [root], section1, option2 = 0 |
|
2466 | 2437 | [root], section1, option3 = the value "Bad_value" is of the wrong type. |
|
2467 | 2438 | [root], section2, another_option = the value "Probably" is of the wrong type. |
|
2468 | 2439 | [root], section3, section3b, section3b-sub, [missing] = 0 |
|
2469 | 2440 | [root], section3, section3b, value2 = the value "a" is of the wrong type. |
|
2470 | 2441 | [root], section3, section3b, value3 = the value "11" is too big. |
|
2471 | 2442 | [root], section4, [missing] = 0 |
|
2472 | 2443 | """ |
|
2473 | 2444 | if levels is None: |
|
2474 | 2445 | # first time called |
|
2475 | 2446 | levels = [] |
|
2476 | 2447 | results = [] |
|
2477 | 2448 | if res is True: |
|
2478 | 2449 | return results |
|
2479 | 2450 | if res is False: |
|
2480 | 2451 | results.append((levels[:], None, False)) |
|
2481 | 2452 | if levels: |
|
2482 | 2453 | levels.pop() |
|
2483 | 2454 | return results |
|
2484 | for (key, val) in res.items(): | |
|
2455 | for (key, val) in res.iteritems(): | |
|
2485 | 2456 | if val == True: |
|
2486 | 2457 | continue |
|
2487 | 2458 | if isinstance(cfg.get(key), dict): |
|
2488 | 2459 | # Go down one level |
|
2489 | 2460 | levels.append(key) |
|
2490 | 2461 | flatten_errors(cfg[key], val, levels, results) |
|
2491 | 2462 | continue |
|
2492 | 2463 | results.append((levels[:], key, val)) |
|
2493 | 2464 | # |
|
2494 | 2465 | # Go up one level |
|
2495 | 2466 | if levels: |
|
2496 | 2467 | levels.pop() |
|
2497 | 2468 | # |
|
2498 | 2469 | return results |
|
2499 | 2470 | |
|
2500 | 2471 | |
|
2501 | 2472 | """*A programming language is a medium of expression.* - Paul Graham""" |
@@ -1,973 +1,944 b'' | |||
|
1 | 1 | """ path.py - An object representing a path to a file or directory. |
|
2 | 2 | |
|
3 | 3 | Example: |
|
4 | 4 | |
|
5 | 5 | from IPython.external.path import path |
|
6 | 6 | d = path('/home/guido/bin') |
|
7 | 7 | for f in d.files('*.py'): |
|
8 | 8 | f.chmod(0755) |
|
9 | 9 | |
|
10 | This module requires Python 2. |

10 | This module requires Python 2.5 or later. | |
|
11 | 11 | |
|
12 | 12 | |
|
13 | 13 | URL: http://www.jorendorff.com/articles/python/path |
|
14 | 14 | Author: Jason Orendorff <jason.orendorff\x40gmail\x2ecom> (and others - see the url!) |
|
15 | 15 | Date: 9 Mar 2007 |
|
16 | 16 | """ |
|
17 | 17 | |
|
18 | 18 | |
|
19 | 19 | # TODO |
|
20 | 20 | # - Tree-walking functions don't avoid symlink loops. Matt Harrison |
|
21 | 21 | # sent me a patch for this. |
|
22 | 22 | # - Bug in write_text(). It doesn't support Universal newline mode. |
|
23 | 23 | # - Better error message in listdir() when self isn't a |
|
24 | 24 | # directory. (On Windows, the error message really sucks.) |
|
25 | 25 | # - Make sure everything has a good docstring. |
|
26 | 26 | # - Add methods for regex find and replace. |
|
27 | 27 | # - guess_content_type() method? |
|
28 | 28 | # - Perhaps support arguments to touch(). |
|
29 | 29 | |
|
30 | 30 | from __future__ import generators |
|
31 | 31 | |
|
32 | 32 | import sys, warnings, os, fnmatch, glob, shutil, codecs |
|
33 | # deprecated in python 2.6 | |
|
34 | warnings.filterwarnings('ignore', r'.*md5.*') | |
|
35 | import md5 | |
|
33 | from hashlib import md5 | |
|
36 | 34 | |
|
37 | 35 | __version__ = '2.2' |
|
38 | 36 | __all__ = ['path'] |
|
39 | 37 | |
|
40 | 38 | # Platform-specific support for path.owner |
|
41 | 39 | if os.name == 'nt': |
|
42 | 40 | try: |
|
43 | 41 | import win32security |
|
44 | 42 | except ImportError: |
|
45 | 43 | win32security = None |
|
46 | 44 | else: |
|
47 | 45 | try: |
|
48 | 46 | import pwd |
|
49 | 47 | except ImportError: |
|
50 | 48 | pwd = None |
|
51 | 49 | |
|
52 | # Pre-2.3 support. Are unicode filenames supported? | |
|
53 | _base = str | |
|
54 | _getcwd = os.getcwd | |
|
55 | try: | |
|
56 | if os.path.supports_unicode_filenames: | |
|
57 | _base = unicode | |
|
58 | _getcwd = os.getcwdu | |
|
59 | except AttributeError: | |
|
60 | pass | |
|
61 | ||
|
62 | # Pre-2.3 workaround for booleans | |
|
63 | try: | |
|
64 | True, False | |
|
65 | except NameError: | |
|
66 | True, False = 1, 0 | |
|
67 | ||
|
68 | # Pre-2.3 workaround for basestring. | |
|
69 | try: | |
|
70 | basestring | |
|
71 | except NameError: | |
|
72 | basestring = (str, unicode) | |
|
73 | ||
|
74 | # Universal newline support | |
|
75 | _textmode = 'r' | |
|
76 | if hasattr(file, 'newlines'): | |
|
77 | _textmode = 'U' | |
|
78 | ||
|
79 | 50 | |
|
80 | 51 | class TreeWalkWarning(Warning): |
|
81 | 52 | pass |
|
82 | 53 | |
|
83 | class path(_base): |

54 | class path(unicode): | |
|
84 | 55 | """ Represents a filesystem path. |
|
85 | 56 | |
|
86 | 57 | For documentation on individual methods, consult their |
|
87 | 58 | counterparts in os.path. |
|
88 | 59 | """ |
|
89 | 60 | |
|
90 | 61 | # --- Special Python methods. |
|
91 | 62 | |
|
92 | 63 | def __repr__(self): |
|
93 | return 'path(%s)' % _base.__repr__(self) |

64 | return 'path(%s)' % unicode.__repr__(self) | |
|
94 | 65 | |
|
95 | 66 | # Adding a path and a string yields a path. |
|
96 | 67 | def __add__(self, more): |
|
97 | 68 | try: |
|
98 | resultStr = _base.__add__(self, more) |

69 | resultStr = unicode.__add__(self, more) | |
|
99 | 70 | except TypeError: #Python bug |
|
100 | 71 | resultStr = NotImplemented |
|
101 | 72 | if resultStr is NotImplemented: |
|
102 | 73 | return resultStr |
|
103 | 74 | return self.__class__(resultStr) |
|
104 | 75 | |
|
105 | 76 | def __radd__(self, other): |
|
106 | 77 | if isinstance(other, basestring): |
|
107 | 78 | return self.__class__(other.__add__(self)) |
|
108 | 79 | else: |
|
109 | 80 | return NotImplemented |
|
110 | 81 | |
|
111 | 82 | # The / operator joins paths. |
|
112 | 83 | def __div__(self, rel): |
|
113 | 84 | """ fp.__div__(rel) == fp / rel == fp.joinpath(rel) |
|
114 | 85 | |
|
115 | 86 | Join two path components, adding a separator character if |
|
116 | 87 | needed. |
|
117 | 88 | """ |
|
118 | 89 | return self.__class__(os.path.join(self, rel)) |
|
119 | 90 | |
|
120 | 91 | # Make the / operator work even when true division is enabled. |
|
121 | 92 | __truediv__ = __div__ |
|
122 | 93 | |
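Because both operators delegate to ``os.path.join`` and wrap the result in the same class, joined paths can be chained further; on a POSIX system, for example:

    p = path('/usr') / 'local' / 'bin'
    # p == path('/usr/local/bin'), and p is itself a path, so p.parent,
    # p / 'python' and the other methods below keep working on it.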
|
123 | 94 | def getcwd(cls): |
|
124 | 95 | """ Return the current working directory as a path object. """ |
|
125 | return cls(_getcwd()) |

96 | return cls(os.getcwdu()) | |
|
126 | 97 | getcwd = classmethod(getcwd) |
|
127 | 98 | |
|
128 | 99 | |
|
129 | 100 | # --- Operations on path strings. |
|
130 | 101 | |
|
131 | 102 | isabs = os.path.isabs |
|
132 | 103 | def abspath(self): return self.__class__(os.path.abspath(self)) |
|
133 | 104 | def normcase(self): return self.__class__(os.path.normcase(self)) |
|
134 | 105 | def normpath(self): return self.__class__(os.path.normpath(self)) |
|
135 | 106 | def realpath(self): return self.__class__(os.path.realpath(self)) |
|
136 | 107 | def expanduser(self): return self.__class__(os.path.expanduser(self)) |
|
137 | 108 | def expandvars(self): return self.__class__(os.path.expandvars(self)) |
|
138 | 109 | def dirname(self): return self.__class__(os.path.dirname(self)) |
|
139 | 110 | basename = os.path.basename |
|
140 | 111 | |
|
141 | 112 | def expand(self): |
|
142 | 113 | """ Clean up a filename by calling expandvars(), |
|
143 | 114 | expanduser(), and normpath() on it. |
|
144 | 115 | |
|
145 | 116 | This is commonly everything needed to clean up a filename |
|
146 | 117 | read from a configuration file, for example. |
|
147 | 118 | """ |
|
148 | 119 | return self.expandvars().expanduser().normpath() |
|
149 | 120 | |
|
150 | 121 | def _get_namebase(self): |
|
151 | 122 | base, ext = os.path.splitext(self.name) |
|
152 | 123 | return base |
|
153 | 124 | |
|
154 | 125 | def _get_ext(self): |
|
155 | f, ext = os.path.splitext(_base(self)) |

126 | f, ext = os.path.splitext(unicode(self)) | |
|
156 | 127 | return ext |
|
157 | 128 | |
|
158 | 129 | def _get_drive(self): |
|
159 | 130 | drive, r = os.path.splitdrive(self) |
|
160 | 131 | return self.__class__(drive) |
|
161 | 132 | |
|
162 | 133 | parent = property( |
|
163 | 134 | dirname, None, None, |
|
164 | 135 | """ This path's parent directory, as a new path object. |
|
165 | 136 | |
|
166 | 137 | For example, path('/usr/local/lib/libpython.so').parent == path('/usr/local/lib') |
|
167 | 138 | """) |
|
168 | 139 | |
|
169 | 140 | name = property( |
|
170 | 141 | basename, None, None, |
|
171 | 142 | """ The name of this file or directory without the full path. |
|
172 | 143 | |
|
173 | 144 | For example, path('/usr/local/lib/libpython.so').name == 'libpython.so' |
|
174 | 145 | """) |
|
175 | 146 | |
|
176 | 147 | namebase = property( |
|
177 | 148 | _get_namebase, None, None, |
|
178 | 149 | """ The same as path.name, but with one file extension stripped off. |
|
179 | 150 | |
|
180 | 151 | For example, path('/home/guido/python.tar.gz').name == 'python.tar.gz', |
|
181 | 152 | but path('/home/guido/python.tar.gz').namebase == 'python.tar' |
|
182 | 153 | """) |
|
183 | 154 | |
|
184 | 155 | ext = property( |
|
185 | 156 | _get_ext, None, None, |
|
186 | 157 | """ The file extension, for example '.py'. """) |
|
187 | 158 | |
|
188 | 159 | drive = property( |
|
189 | 160 | _get_drive, None, None, |
|
190 | 161 | """ The drive specifier, for example 'C:'. |
|
191 | 162 | This is always empty on systems that don't use drive specifiers. |
|
192 | 163 | """) |
|
193 | 164 | |
|
194 | 165 | def splitpath(self): |
|
195 | 166 | """ p.splitpath() -> Return (p.parent, p.name). """ |
|
196 | 167 | parent, child = os.path.split(self) |
|
197 | 168 | return self.__class__(parent), child |
|
198 | 169 | |
|
199 | 170 | def splitdrive(self): |
|
200 | 171 | """ p.splitdrive() -> Return (p.drive, <the rest of p>). |
|
201 | 172 | |
|
202 | 173 | Split the drive specifier from this path. If there is |
|
203 | 174 | no drive specifier, p.drive is empty, so the return value |
|
204 | 175 | is simply (path(''), p). This is always the case on Unix. |
|
205 | 176 | """ |
|
206 | 177 | drive, rel = os.path.splitdrive(self) |
|
207 | 178 | return self.__class__(drive), rel |
|
208 | 179 | |
|
209 | 180 | def splitext(self): |
|
210 | 181 | """ p.splitext() -> Return (p.stripext(), p.ext). |
|
211 | 182 | |
|
212 | 183 | Split the filename extension from this path and return |
|
213 | 184 | the two parts. Either part may be empty. |
|
214 | 185 | |
|
215 | 186 | The extension is everything from '.' to the end of the |
|
216 | 187 | last path segment. This has the property that if |
|
217 | 188 | (a, b) == p.splitext(), then a + b == p. |
|
218 | 189 | """ |
|
219 | 190 | filename, ext = os.path.splitext(self) |
|
220 | 191 | return self.__class__(filename), ext |
|
221 | 192 | |
|
222 | 193 | def stripext(self): |
|
223 | 194 | """ p.stripext() -> Remove one file extension from the path. |
|
224 | 195 | |
|
225 | 196 | For example, path('/home/guido/python.tar.gz').stripext() |
|
226 | 197 | returns path('/home/guido/python.tar'). |
|
227 | 198 | """ |
|
228 | 199 | return self.splitext()[0] |
|
229 | 200 | |
|
230 | 201 | if hasattr(os.path, 'splitunc'): |
|
231 | 202 | def splitunc(self): |
|
232 | 203 | unc, rest = os.path.splitunc(self) |
|
233 | 204 | return self.__class__(unc), rest |
|
234 | 205 | |
|
235 | 206 | def _get_uncshare(self): |
|
236 | 207 | unc, r = os.path.splitunc(self) |
|
237 | 208 | return self.__class__(unc) |
|
238 | 209 | |
|
239 | 210 | uncshare = property( |
|
240 | 211 | _get_uncshare, None, None, |
|
241 | 212 | """ The UNC mount point for this path. |
|
242 | 213 | This is empty for paths on local drives. """) |
|
243 | 214 | |
|
244 | 215 | def joinpath(self, *args): |
|
245 | 216 | """ Join two or more path components, adding a separator |
|
246 | 217 | character (os.sep) if needed. Returns a new path |
|
247 | 218 | object. |
|
248 | 219 | """ |
|
249 | 220 | return self.__class__(os.path.join(self, *args)) |
|
250 | 221 | |
|
251 | 222 | def splitall(self): |
|
252 | 223 | r""" Return a list of the path components in this path. |
|
253 | 224 | |
|
254 | 225 | The first item in the list will be a path. Its value will be |
|
255 | 226 | either os.curdir, os.pardir, empty, or the root directory of |
|
256 | 227 | this path (for example, '/' or 'C:\\'). The other items in |
|
257 | 228 | the list will be strings. |
|
258 | 229 | |
|
259 | 230 | path.path.joinpath(*result) will yield the original path. |
|
260 | 231 | """ |
|
261 | 232 | parts = [] |
|
262 | 233 | loc = self |
|
263 | 234 | while loc != os.curdir and loc != os.pardir: |
|
264 | 235 | prev = loc |
|
265 | 236 | loc, child = prev.splitpath() |
|
266 | 237 | if loc == prev: |
|
267 | 238 | break |
|
268 | 239 | parts.append(child) |
|
269 | 240 | parts.append(loc) |
|
270 | 241 | parts.reverse() |
|
271 | 242 | return parts |
|
272 | 243 | |
|
273 | 244 | def relpath(self): |
|
274 | 245 | """ Return this path as a relative path, |
|
275 | 246 | based from the current working directory. |
|
276 | 247 | """ |
|
277 | 248 | cwd = self.__class__(os.getcwd()) |
|
278 | 249 | return cwd.relpathto(self) |
|
279 | 250 | |
|
280 | 251 | def relpathto(self, dest): |
|
281 | 252 | """ Return a relative path from self to dest. |
|
282 | 253 | |
|
283 | 254 | If there is no relative path from self to dest, for example if |
|
284 | 255 | they reside on different drives in Windows, then this returns |
|
285 | 256 | dest.abspath(). |
|
286 | 257 | """ |
|
287 | 258 | origin = self.abspath() |
|
288 | 259 | dest = self.__class__(dest).abspath() |
|
289 | 260 | |
|
290 | 261 | orig_list = origin.normcase().splitall() |
|
291 | 262 | # Don't normcase dest! We want to preserve the case. |
|
292 | 263 | dest_list = dest.splitall() |
|
293 | 264 | |
|
294 | 265 | if orig_list[0] != os.path.normcase(dest_list[0]): |
|
295 | 266 | # Can't get here from there. |
|
296 | 267 | return dest |
|
297 | 268 | |
|
298 | 269 | # Find the location where the two paths start to differ. |
|
299 | 270 | i = 0 |
|
300 | 271 | for start_seg, dest_seg in zip(orig_list, dest_list): |
|
301 | 272 | if start_seg != os.path.normcase(dest_seg): |
|
302 | 273 | break |
|
303 | 274 | i += 1 |
|
304 | 275 | |
|
305 | 276 | # Now i is the point where the two paths diverge. |
|
306 | 277 | # Need a certain number of "os.pardir"s to work up |
|
307 | 278 | # from the origin to the point of divergence. |
|
308 | 279 | segments = [os.pardir] * (len(orig_list) - i) |
|
309 | 280 | # Need to add the diverging part of dest_list. |
|
310 | 281 | segments += dest_list[i:] |
|
311 | 282 | if len(segments) == 0: |
|
312 | 283 | # If they happen to be identical, use os.curdir. |
|
313 | 284 | relpath = os.curdir |
|
314 | 285 | else: |
|
315 | 286 | relpath = os.path.join(*segments) |
|
316 | 287 | return self.__class__(relpath) |
|
317 | 288 | |
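An illustrative sketch of relpath()/relpathto(); the directories are hypothetical, and the result follows the divergence-point computation above:

    base = path('/home/guido/projects')
    target = path('/home/guido/docs/notes.txt')
    base.relpathto(target)   # path('../docs/notes.txt')
    # relpath() performs the same computation with os.getcwd() as the origin.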
|
318 | 289 | # --- Listing, searching, walking, and matching |
|
319 | 290 | |
|
320 | 291 | def listdir(self, pattern=None): |
|
321 | 292 | """ D.listdir() -> List of items in this directory. |
|
322 | 293 | |
|
323 | 294 | Use D.files() or D.dirs() instead if you want a listing |
|
324 | 295 | of just files or just subdirectories. |
|
325 | 296 | |
|
326 | 297 | The elements of the list are path objects. |
|
327 | 298 | |
|
328 | 299 | With the optional 'pattern' argument, this only lists |
|
329 | 300 | items whose names match the given pattern. |
|
330 | 301 | """ |
|
331 | 302 | names = os.listdir(self) |
|
332 | 303 | if pattern is not None: |
|
333 | 304 | names = fnmatch.filter(names, pattern) |
|
334 | 305 | return [self / child for child in names] |
|
335 | 306 | |
|
336 | 307 | def dirs(self, pattern=None): |
|
337 | 308 | """ D.dirs() -> List of this directory's subdirectories. |
|
338 | 309 | |
|
339 | 310 | The elements of the list are path objects. |
|
340 | 311 | This does not walk recursively into subdirectories |
|
341 | 312 | (but see path.walkdirs). |
|
342 | 313 | |
|
343 | 314 | With the optional 'pattern' argument, this only lists |
|
344 | 315 | directories whose names match the given pattern. For |
|
345 | 316 | example, d.dirs('build-*'). |
|
346 | 317 | """ |
|
347 | 318 | return [p for p in self.listdir(pattern) if p.isdir()] |
|
348 | 319 | |
|
349 | 320 | def files(self, pattern=None): |
|
350 | 321 | """ D.files() -> List of the files in this directory. |
|
351 | 322 | |
|
352 | 323 | The elements of the list are path objects. |
|
353 | 324 | This does not walk into subdirectories (see path.walkfiles). |
|
354 | 325 | |
|
355 | 326 | With the optional 'pattern' argument, this only lists files |
|
356 | 327 | whose names match the given pattern. For example, |
|
357 | 328 | d.files('*.pyc'). |
|
358 | 329 | """ |
|
359 | 330 | |
|
360 | 331 | return [p for p in self.listdir(pattern) if p.isfile()] |
|
361 | 332 | |
|
362 | 333 | def walk(self, pattern=None, errors='strict'): |
|
363 | 334 | """ D.walk() -> iterator over files and subdirs, recursively. |
|
364 | 335 | |
|
365 | 336 | The iterator yields path objects naming each child item of |
|
366 | 337 | this directory and its descendants. This requires that |
|
367 | 338 | D.isdir(). |
|
368 | 339 | |
|
369 | 340 | This performs a depth-first traversal of the directory tree. |
|
370 | 341 | Each directory is returned just before all its children. |
|
371 | 342 | |
|
372 | 343 | The errors= keyword argument controls behavior when an |
|
373 | 344 | error occurs. The default is 'strict', which causes an |
|
374 | 345 | exception. The other allowed values are 'warn', which |
|
375 | 346 | reports the error via warnings.warn(), and 'ignore'. |
|
376 | 347 | """ |
|
377 | 348 | if errors not in ('strict', 'warn', 'ignore'): |
|
378 | 349 | raise ValueError("invalid errors parameter") |
|
379 | 350 | |
|
380 | 351 | try: |
|
381 | 352 | childList = self.listdir() |
|
382 | 353 | except Exception: |
|
383 | 354 | if errors == 'ignore': |
|
384 | 355 | return |
|
385 | 356 | elif errors == 'warn': |
|
386 | 357 | warnings.warn( |
|
387 | 358 | "Unable to list directory '%s': %s" |
|
388 | 359 | % (self, sys.exc_info()[1]), |
|
389 | 360 | TreeWalkWarning) |
|
390 | 361 | return |
|
391 | 362 | else: |
|
392 | 363 | raise |
|
393 | 364 | |
|
394 | 365 | for child in childList: |
|
395 | 366 | if pattern is None or child.fnmatch(pattern): |
|
396 | 367 | yield child |
|
397 | 368 | try: |
|
398 | 369 | isdir = child.isdir() |
|
399 | 370 | except Exception: |
|
400 | 371 | if errors == 'ignore': |
|
401 | 372 | isdir = False |
|
402 | 373 | elif errors == 'warn': |
|
403 | 374 | warnings.warn( |
|
404 | 375 | "Unable to access '%s': %s" |
|
405 | 376 | % (child, sys.exc_info()[1]), |
|
406 | 377 | TreeWalkWarning) |
|
407 | 378 | isdir = False |
|
408 | 379 | else: |
|
409 | 380 | raise |
|
410 | 381 | |
|
411 | 382 | if isdir: |
|
412 | 383 | for item in child.walk(pattern, errors): |
|
413 | 384 | yield item |
|
414 | 385 | |
|
415 | 386 | def walkdirs(self, pattern=None, errors='strict'): |
|
416 | 387 | """ D.walkdirs() -> iterator over subdirs, recursively. |
|
417 | 388 | |
|
418 | 389 | With the optional 'pattern' argument, this yields only |
|
419 | 390 | directories whose names match the given pattern. For |
|
420 | 391 | example, mydir.walkdirs('*test') yields only directories |
|
421 | 392 | with names ending in 'test'. |
|
422 | 393 | |
|
423 | 394 | The errors= keyword argument controls behavior when an |
|
424 | 395 | error occurs. The default is 'strict', which causes an |
|
425 | 396 | exception. The other allowed values are 'warn', which |
|
426 | 397 | reports the error via warnings.warn(), and 'ignore'. |
|
427 | 398 | """ |
|
428 | 399 | if errors not in ('strict', 'warn', 'ignore'): |
|
429 | 400 | raise ValueError("invalid errors parameter") |
|
430 | 401 | |
|
431 | 402 | try: |
|
432 | 403 | dirs = self.dirs() |
|
433 | 404 | except Exception: |
|
434 | 405 | if errors == 'ignore': |
|
435 | 406 | return |
|
436 | 407 | elif errors == 'warn': |
|
437 | 408 | warnings.warn( |
|
438 | 409 | "Unable to list directory '%s': %s" |
|
439 | 410 | % (self, sys.exc_info()[1]), |
|
440 | 411 | TreeWalkWarning) |
|
441 | 412 | return |
|
442 | 413 | else: |
|
443 | 414 | raise |
|
444 | 415 | |
|
445 | 416 | for child in dirs: |
|
446 | 417 | if pattern is None or child.fnmatch(pattern): |
|
447 | 418 | yield child |
|
448 | 419 | for subsubdir in child.walkdirs(pattern, errors): |
|
449 | 420 | yield subsubdir |
|
450 | 421 | |
|
451 | 422 | def walkfiles(self, pattern=None, errors='strict'): |
|
452 | 423 | """ D.walkfiles() -> iterator over files in D, recursively. |
|
453 | 424 | |
|
454 | 425 | The optional argument, pattern, limits the results to files |
|
455 | 426 | with names that match the pattern. For example, |
|
456 | 427 | mydir.walkfiles('*.tmp') yields only files with the .tmp |
|
457 | 428 | extension. |
|
458 | 429 | """ |
|
459 | 430 | if errors not in ('strict', 'warn', 'ignore'): |
|
460 | 431 | raise ValueError("invalid errors parameter") |
|
461 | 432 | |
|
462 | 433 | try: |
|
463 | 434 | childList = self.listdir() |
|
464 | 435 | except Exception: |
|
465 | 436 | if errors == 'ignore': |
|
466 | 437 | return |
|
467 | 438 | elif errors == 'warn': |
|
468 | 439 | warnings.warn( |
|
469 | 440 | "Unable to list directory '%s': %s" |
|
470 | 441 | % (self, sys.exc_info()[1]), |
|
471 | 442 | TreeWalkWarning) |
|
472 | 443 | return |
|
473 | 444 | else: |
|
474 | 445 | raise |
|
475 | 446 | |
|
476 | 447 | for child in childList: |
|
477 | 448 | try: |
|
478 | 449 | isfile = child.isfile() |
|
479 | 450 | isdir = not isfile and child.isdir() |
|
480 | 451 | except: |
|
481 | 452 | if errors == 'ignore': |
|
482 | 453 | continue |
|
483 | 454 | elif errors == 'warn': |
|
484 | 455 | warnings.warn( |
|
485 | 456 | "Unable to access '%s': %s" |
|
486 | 457 | % (self, sys.exc_info()[1]), |
|
487 | 458 | TreeWalkWarning) |
|
488 | 459 | continue |
|
489 | 460 | else: |
|
490 | 461 | raise |
|
491 | 462 | |
|
492 | 463 | if isfile: |
|
493 | 464 | if pattern is None or child.fnmatch(pattern): |
|
494 | 465 | yield child |
|
495 | 466 | elif isdir: |
|
496 | 467 | for f in child.walkfiles(pattern, errors): |
|
497 | 468 | yield f |
|
498 | 469 | |
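A hedged sketch of the listing and walking API defined above (directory and pattern names are hypothetical):

    d = path('/tmp/project')
    d.files('*.py')     # immediate files only, non-recursive
    d.dirs('build-*')   # immediate subdirectories matching the pattern
    for f in d.walkfiles('*.txt', errors='warn'):
        # recursive; unreadable entries are reported via warnings.warn()
        print f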
|
499 | 470 | def fnmatch(self, pattern): |
|
500 | 471 | """ Return True if self.name matches the given pattern. |
|
501 | 472 | |
|
502 | 473 | pattern - A filename pattern with wildcards, |
|
503 | 474 | for example '*.py'. |
|
504 | 475 | """ |
|
505 | 476 | return fnmatch.fnmatch(self.name, pattern) |
|
506 | 477 | |
|
507 | 478 | def glob(self, pattern): |
|
508 | 479 | """ Return a list of path objects that match the pattern. |
|
509 | 480 | |
|
510 | 481 | pattern - a path relative to this directory, with wildcards. |
|
511 | 482 | |
|
512 | 483 | For example, path('/users').glob('*/bin/*') returns a list |
|
513 | 484 | of all the files users have in their bin directories. |
|
514 | 485 | """ |
|
515 | 486 | cls = self.__class__ |
|
516 | return [cls(s) for s in glob.glob( | |

487 | return [cls(s) for s in glob.glob(unicode(self / pattern))] | |
|
517 | 488 | |
|
518 | 489 | |
|
519 | 490 | # --- Reading or writing an entire file at once. |
|
520 | 491 | |
|
521 | 492 | def open(self, mode='r'): |
|
522 | 493 | """ Open this file. Return a file object. """ |
|
523 | return | |

494 | return open(self, mode) | |
|
524 | 495 | |
|
525 | 496 | def bytes(self): |
|
526 | 497 | """ Open this file, read all bytes, return them as a string. """ |
|
527 | 498 | f = self.open('rb') |
|
528 | 499 | try: |
|
529 | 500 | return f.read() |
|
530 | 501 | finally: |
|
531 | 502 | f.close() |
|
532 | 503 | |
|
533 | 504 | def write_bytes(self, bytes, append=False): |
|
534 | 505 | """ Open this file and write the given bytes to it. |
|
535 | 506 | |
|
536 | 507 | Default behavior is to overwrite any existing file. |
|
537 | 508 | Call p.write_bytes(bytes, append=True) to append instead. |
|
538 | 509 | """ |
|
539 | 510 | if append: |
|
540 | 511 | mode = 'ab' |
|
541 | 512 | else: |
|
542 | 513 | mode = 'wb' |
|
543 | 514 | f = self.open(mode) |
|
544 | 515 | try: |
|
545 | 516 | f.write(bytes) |
|
546 | 517 | finally: |
|
547 | 518 | f.close() |
|
548 | 519 | |
|
549 | 520 | def text(self, encoding=None, errors='strict'): |
|
550 | 521 | r""" Open this file, read it in, return the content as a string. |
|
551 | 522 | |
|
552 | 523 | This uses 'U' mode in Python 2.3 and later, so '\r\n' and '\r' |
|
553 | 524 | are automatically translated to '\n'. |
|
554 | 525 | |
|
555 | 526 | Optional arguments: |
|
556 | 527 | |
|
557 | 528 | encoding - The Unicode encoding (or character set) of |
|
558 | 529 | the file. If present, the content of the file is |
|
559 | 530 | decoded and returned as a unicode object; otherwise |
|
560 | 531 | it is returned as an 8-bit str. |
|
561 | 532 | errors - How to handle Unicode errors; see help(str.decode) |
|
562 | 533 | for the options. Default is 'strict'. |
|
563 | 534 | """ |
|
564 | 535 | if encoding is None: |
|
565 | 536 | # 8-bit |
|
566 | f = self.open( | |

537 | f = self.open('U') | |
|
567 | 538 | try: |
|
568 | 539 | return f.read() |
|
569 | 540 | finally: |
|
570 | 541 | f.close() |
|
571 | 542 | else: |
|
572 | 543 | # Unicode |
|
573 | 544 | f = codecs.open(self, 'r', encoding, errors) |
|
574 | 545 | # (Note - Can't use 'U' mode here, since codecs.open |
|
575 | 546 | # doesn't support 'U' mode, even in Python 2.3.) |
|
576 | 547 | try: |
|
577 | 548 | t = f.read() |
|
578 | 549 | finally: |
|
579 | 550 | f.close() |
|
580 | 551 | return (t.replace(u'\r\n', u'\n') |
|
581 | 552 | .replace(u'\r\x85', u'\n') |
|
582 | 553 | .replace(u'\r', u'\n') |
|
583 | 554 | .replace(u'\x85', u'\n') |
|
584 | 555 | .replace(u'\u2028', u'\n')) |
|
585 | 556 | |
|
586 | 557 | def write_text(self, text, encoding=None, errors='strict', linesep=os.linesep, append=False): |
|
587 | 558 | r""" Write the given text to this file. |
|
588 | 559 | |
|
589 | 560 | The default behavior is to overwrite any existing file; |
|
590 | 561 | to append instead, use the 'append=True' keyword argument. |
|
591 | 562 | |
|
592 | 563 | There are two differences between path.write_text() and |
|
593 | 564 | path.write_bytes(): newline handling and Unicode handling. |
|
594 | 565 | See below. |
|
595 | 566 | |
|
596 | 567 | Parameters: |
|
597 | 568 | |
|
598 | 569 | - text - str/unicode - The text to be written. |
|
599 | 570 | |
|
600 | 571 | - encoding - str - The Unicode encoding that will be used. |
|
601 | 572 | This is ignored if 'text' isn't a Unicode string. |
|
602 | 573 | |
|
603 | 574 | - errors - str - How to handle Unicode encoding errors. |
|
604 | 575 | Default is 'strict'. See help(unicode.encode) for the |
|
605 | 576 | options. This is ignored if 'text' isn't a Unicode |
|
606 | 577 | string. |
|
607 | 578 | |
|
608 | 579 | - linesep - keyword argument - str/unicode - The sequence of |
|
609 | 580 | characters to be used to mark end-of-line. The default is |
|
610 | 581 | os.linesep. You can also specify None; this means to |
|
611 | 582 | leave all newlines as they are in 'text'. |
|
612 | 583 | |
|
613 | 584 | - append - keyword argument - bool - Specifies what to do if |
|
614 | 585 | the file already exists (True: append to the end of it; |
|
615 | 586 | False: overwrite it.) The default is False. |
|
616 | 587 | |
|
617 | 588 | |
|
618 | 589 | --- Newline handling. |
|
619 | 590 | |
|
620 | 591 | write_text() converts all standard end-of-line sequences |
|
621 | 592 | ('\n', '\r', and '\r\n') to your platform's default end-of-line |
|
622 | 593 | sequence (see os.linesep; on Windows, for example, the |
|
623 | 594 | end-of-line marker is '\r\n'). |
|
624 | 595 | |
|
625 | 596 | If you don't like your platform's default, you can override it |
|
626 | 597 | using the 'linesep=' keyword argument. If you specifically want |
|
627 | 598 | write_text() to preserve the newlines as-is, use 'linesep=None'. |
|
628 | 599 | |
|
629 | 600 | This applies to Unicode text the same as to 8-bit text, except |
|
630 | 601 | there are three additional standard Unicode end-of-line sequences: |
|
631 | 602 | u'\x85', u'\r\x85', and u'\u2028'. |
|
632 | 603 | |
|
633 | 604 | (This is slightly different from when you open a file for |
|
634 | 605 | writing with fopen(filename, "w") in C or file(filename, 'w') |
|
635 | 606 | in Python.) |
|
636 | 607 | |
|
637 | 608 | |
|
638 | 609 | --- Unicode |
|
639 | 610 | |
|
640 | 611 | If 'text' isn't Unicode, then apart from newline handling, the |
|
641 | 612 | bytes are written verbatim to the file. The 'encoding' and |
|
642 | 613 | 'errors' arguments are not used and must be omitted. |
|
643 | 614 | |
|
644 | 615 | If 'text' is Unicode, it is first converted to bytes using the |
|
645 | 616 | specified 'encoding' (or the default encoding if 'encoding' |
|
646 | 617 | isn't specified). The 'errors' argument applies only to this |
|
647 | 618 | conversion. |
|
648 | 619 | |
|
649 | 620 | """ |
|
650 | 621 | if isinstance(text, unicode): |
|
651 | 622 | if linesep is not None: |
|
652 | 623 | # Convert all standard end-of-line sequences to |
|
653 | 624 | # ordinary newline characters. |
|
654 | 625 | text = (text.replace(u'\r\n', u'\n') |
|
655 | 626 | .replace(u'\r\x85', u'\n') |
|
656 | 627 | .replace(u'\r', u'\n') |
|
657 | 628 | .replace(u'\x85', u'\n') |
|
658 | 629 | .replace(u'\u2028', u'\n')) |
|
659 | 630 | text = text.replace(u'\n', linesep) |
|
660 | 631 | if encoding is None: |
|
661 | 632 | encoding = sys.getdefaultencoding() |
|
662 | 633 | bytes = text.encode(encoding, errors) |
|
663 | 634 | else: |
|
664 | 635 | # It is an error to specify an encoding if 'text' is |
|
665 | 636 | # an 8-bit string. |
|
666 | 637 | assert encoding is None |
|
667 | 638 | |
|
668 | 639 | if linesep is not None: |
|
669 | 640 | text = (text.replace('\r\n', '\n') |
|
670 | 641 | .replace('\r', '\n')) |
|
671 | 642 | bytes = text.replace('\n', linesep) |
|
672 | 643 | |
|
673 | 644 | self.write_bytes(bytes, append) |
|
674 | 645 | |
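A short sketch of the whole-file text helpers (the filename and encoding are hypothetical; semantics follow the docstrings above):

    p = path('/tmp/example.txt')
    p.write_text(u'line one\nline two\n', encoding='utf-8')  # '\n' becomes os.linesep on disk
    p.text(encoding='utf-8')              # unicode result, newlines normalized back to '\n'
    p.write_text('8-bit data\n', append=True)   # str input: no encoding argument allowed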
|
675 | 646 | def lines(self, encoding=None, errors='strict', retain=True): |
|
676 | 647 | r""" Open this file, read all lines, return them in a list. |
|
677 | 648 | |
|
678 | 649 | Optional arguments: |
|
679 | 650 | encoding - The Unicode encoding (or character set) of |
|
680 | 651 | the file. The default is None, meaning the content |
|
681 | 652 | of the file is read as 8-bit characters and returned |
|
682 | 653 | as a list of (non-Unicode) str objects. |
|
683 | 654 | errors - How to handle Unicode errors; see help(str.decode) |
|
684 | 655 | for the options. Default is 'strict' |
|
685 | 656 | retain - If true, retain newline characters; but all newline |
|
686 | 657 | character combinations ('\r', '\n', '\r\n') are |
|
687 | 658 | translated to '\n'. If false, newline characters are |
|
688 | 659 | stripped off. Default is True. |
|
689 | 660 | |
|
690 | 661 | This uses 'U' mode in Python 2.3 and later. |
|
691 | 662 | """ |
|
692 | 663 | if encoding is None and retain: |
|
693 | f = self.open( | |

664 | f = self.open('U') | |
|
694 | 665 | try: |
|
695 | 666 | return f.readlines() |
|
696 | 667 | finally: |
|
697 | 668 | f.close() |
|
698 | 669 | else: |
|
699 | 670 | return self.text(encoding, errors).splitlines(retain) |
|
700 | 671 | |
|
701 | 672 | def write_lines(self, lines, encoding=None, errors='strict', |
|
702 | 673 | linesep=os.linesep, append=False): |
|
703 | 674 | r""" Write the given lines of text to this file. |
|
704 | 675 | |
|
705 | 676 | By default this overwrites any existing file at this path. |
|
706 | 677 | |
|
707 | 678 | This puts a platform-specific newline sequence on every line. |
|
708 | 679 | See 'linesep' below. |
|
709 | 680 | |
|
710 | 681 | lines - A list of strings. |
|
711 | 682 | |
|
712 | 683 | encoding - A Unicode encoding to use. This applies only if |
|
713 | 684 | 'lines' contains any Unicode strings. |
|
714 | 685 | |
|
715 | 686 | errors - How to handle errors in Unicode encoding. This |
|
716 | 687 | also applies only to Unicode strings. |
|
717 | 688 | |
|
718 | 689 | linesep - The desired line-ending. This line-ending is |
|
719 | 690 | applied to every line. If a line already has any |
|
720 | 691 | standard line ending ('\r', '\n', '\r\n', u'\x85', |
|
721 | 692 | u'\r\x85', u'\u2028'), that will be stripped off and |
|
722 | 693 | this will be used instead. The default is os.linesep, |
|
723 | 694 | which is platform-dependent ('\r\n' on Windows, '\n' on |
|
724 | 695 | Unix, etc.) Specify None to write the lines as-is, |
|
725 | 696 | like file.writelines(). |
|
726 | 697 | |
|
727 | 698 | Use the keyword argument append=True to append lines to the |
|
728 | 699 | file. The default is to overwrite the file. Warning: |
|
729 | 700 | When you use this with Unicode data, if the encoding of the |
|
730 | 701 | existing data in the file is different from the encoding |
|
731 | 702 | you specify with the encoding= parameter, the result is |
|
732 | 703 | mixed-encoding data, which can really confuse someone trying |
|
733 | 704 | to read the file later. |
|
734 | 705 | """ |
|
735 | 706 | if append: |
|
736 | 707 | mode = 'ab' |
|
737 | 708 | else: |
|
738 | 709 | mode = 'wb' |
|
739 | 710 | f = self.open(mode) |
|
740 | 711 | try: |
|
741 | 712 | for line in lines: |
|
742 | 713 | isUnicode = isinstance(line, unicode) |
|
743 | 714 | if linesep is not None: |
|
744 | 715 | # Strip off any existing line-end and add the |
|
745 | 716 | # specified linesep string. |
|
746 | 717 | if isUnicode: |
|
747 | 718 | if line[-2:] in (u'\r\n', u'\x0d\x85'): |
|
748 | 719 | line = line[:-2] |
|
749 | 720 | elif line[-1:] in (u'\r', u'\n', |
|
750 | 721 | u'\x85', u'\u2028'): |
|
751 | 722 | line = line[:-1] |
|
752 | 723 | else: |
|
753 | 724 | if line[-2:] == '\r\n': |
|
754 | 725 | line = line[:-2] |
|
755 | 726 | elif line[-1:] in ('\r', '\n'): |
|
756 | 727 | line = line[:-1] |
|
757 | 728 | line += linesep |
|
758 | 729 | if isUnicode: |
|
759 | 730 | if encoding is None: |
|
760 | 731 | encoding = sys.getdefaultencoding() |
|
761 | 732 | line = line.encode(encoding, errors) |
|
762 | 733 | f.write(line) |
|
763 | 734 | finally: |
|
764 | 735 | f.close() |
|
765 | 736 | |
|
766 | 737 | def read_md5(self): |
|
767 | 738 | """ Calculate the md5 hash for this file. |
|
768 | 739 | |
|
769 | 740 | This reads through the entire file. |
|
770 | 741 | """ |
|
771 | 742 | f = self.open('rb') |
|
772 | 743 | try: |
|
773 | m = md5 | |

744 | m = md5() | |
|
774 | 745 | while True: |
|
775 | 746 | d = f.read(8192) |
|
776 | 747 | if not d: |
|
777 | 748 | break |
|
778 | 749 | m.update(d) |
|
779 | 750 | finally: |
|
780 | 751 | f.close() |
|
781 | 752 | return m.digest() |
|
782 | 753 | |
|
783 | 754 | # --- Methods for querying the filesystem. |
|
784 | 755 | |
|
785 | 756 | exists = os.path.exists |
|
786 | 757 | isdir = os.path.isdir |
|
787 | 758 | isfile = os.path.isfile |
|
788 | 759 | islink = os.path.islink |
|
789 | 760 | ismount = os.path.ismount |
|
790 | 761 | |
|
791 | 762 | if hasattr(os.path, 'samefile'): |
|
792 | 763 | samefile = os.path.samefile |
|
793 | 764 | |
|
794 | 765 | getatime = os.path.getatime |
|
795 | 766 | atime = property( |
|
796 | 767 | getatime, None, None, |
|
797 | 768 | """ Last access time of the file. """) |
|
798 | 769 | |
|
799 | 770 | getmtime = os.path.getmtime |
|
800 | 771 | mtime = property( |
|
801 | 772 | getmtime, None, None, |
|
802 | 773 | """ Last-modified time of the file. """) |
|
803 | 774 | |
|
804 | 775 | if hasattr(os.path, 'getctime'): |
|
805 | 776 | getctime = os.path.getctime |
|
806 | 777 | ctime = property( |
|
807 | 778 | getctime, None, None, |
|
808 | 779 | """ Creation time of the file. """) |
|
809 | 780 | |
|
810 | 781 | getsize = os.path.getsize |
|
811 | 782 | size = property( |
|
812 | 783 | getsize, None, None, |
|
813 | 784 | """ Size of the file, in bytes. """) |
|
814 | 785 | |
|
815 | 786 | if hasattr(os, 'access'): |
|
816 | 787 | def access(self, mode): |
|
817 | 788 | """ Return true if current user has access to this path. |
|
818 | 789 | |
|
819 | 790 | mode - One of the constants os.F_OK, os.R_OK, os.W_OK, os.X_OK |
|
820 | 791 | """ |
|
821 | 792 | return os.access(self, mode) |
|
822 | 793 | |
|
823 | 794 | def stat(self): |
|
824 | 795 | """ Perform a stat() system call on this path. """ |
|
825 | 796 | return os.stat(self) |
|
826 | 797 | |
|
827 | 798 | def lstat(self): |
|
828 | 799 | """ Like path.stat(), but do not follow symbolic links. """ |
|
829 | 800 | return os.lstat(self) |
|
830 | 801 | |
|
831 | 802 | def get_owner(self): |
|
832 | 803 | r""" Return the name of the owner of this file or directory. |
|
833 | 804 | |
|
834 | 805 | This follows symbolic links. |
|
835 | 806 | |
|
836 | 807 | On Windows, this returns a name of the form ur'DOMAIN\User Name'. |
|
837 | 808 | On Windows, a group can own a file or directory. |
|
838 | 809 | """ |
|
839 | 810 | if os.name == 'nt': |
|
840 | 811 | if win32security is None: |
|
841 | 812 | raise Exception("path.owner requires win32all to be installed") |
|
842 | 813 | desc = win32security.GetFileSecurity( |
|
843 | 814 | self, win32security.OWNER_SECURITY_INFORMATION) |
|
844 | 815 | sid = desc.GetSecurityDescriptorOwner() |
|
845 | 816 | account, domain, typecode = win32security.LookupAccountSid(None, sid) |
|
846 | 817 | return domain + u'\\' + account |
|
847 | 818 | else: |
|
848 | 819 | if pwd is None: |
|
849 | 820 | raise NotImplementedError("path.owner is not implemented on this platform.") |
|
850 | 821 | st = self.stat() |
|
851 | 822 | return pwd.getpwuid(st.st_uid).pw_name |
|
852 | 823 | |
|
853 | 824 | owner = property( |
|
854 | 825 | get_owner, None, None, |
|
855 | 826 | """ Name of the owner of this file or directory. """) |
|
856 | 827 | |
|
857 | 828 | if hasattr(os, 'statvfs'): |
|
858 | 829 | def statvfs(self): |
|
859 | 830 | """ Perform a statvfs() system call on this path. """ |
|
860 | 831 | return os.statvfs(self) |
|
861 | 832 | |
|
862 | 833 | if hasattr(os, 'pathconf'): |
|
863 | 834 | def pathconf(self, name): |
|
864 | 835 | return os.pathconf(self, name) |
|
865 | 836 | |
|
866 | 837 | |
|
867 | 838 | # --- Modifying operations on files and directories |
|
868 | 839 | |
|
869 | 840 | def utime(self, times): |
|
870 | 841 | """ Set the access and modified times of this file. """ |
|
871 | 842 | os.utime(self, times) |
|
872 | 843 | |
|
873 | 844 | def chmod(self, mode): |
|
874 | 845 | os.chmod(self, mode) |
|
875 | 846 | |
|
876 | 847 | if hasattr(os, 'chown'): |
|
877 | 848 | def chown(self, uid, gid): |
|
878 | 849 | os.chown(self, uid, gid) |
|
879 | 850 | |
|
880 | 851 | def rename(self, new): |
|
881 | 852 | os.rename(self, new) |
|
882 | 853 | |
|
883 | 854 | def renames(self, new): |
|
884 | 855 | os.renames(self, new) |
|
885 | 856 | |
|
886 | 857 | |
|
887 | 858 | # --- Create/delete operations on directories |
|
888 | 859 | |
|
889 | 860 | def mkdir(self, mode=0777): |
|
890 | 861 | os.mkdir(self, mode) |
|
891 | 862 | |
|
892 | 863 | def makedirs(self, mode=0777): |
|
893 | 864 | os.makedirs(self, mode) |
|
894 | 865 | |
|
895 | 866 | def rmdir(self): |
|
896 | 867 | os.rmdir(self) |
|
897 | 868 | |
|
898 | 869 | def removedirs(self): |
|
899 | 870 | os.removedirs(self) |
|
900 | 871 | |
|
901 | 872 | |
|
902 | 873 | # --- Modifying operations on files |
|
903 | 874 | |
|
904 | 875 | def touch(self): |
|
905 | 876 | """ Set the access/modified times of this file to the current time. |
|
906 | 877 | Create the file if it does not exist. |
|
907 | 878 | """ |
|
908 | 879 | fd = os.open(self, os.O_WRONLY | os.O_CREAT, 0666) |
|
909 | 880 | os.close(fd) |
|
910 | 881 | os.utime(self, None) |
|
911 | 882 | |
|
912 | 883 | def remove(self): |
|
913 | 884 | os.remove(self) |
|
914 | 885 | |
|
915 | 886 | def unlink(self): |
|
916 | 887 | os.unlink(self) |
|
917 | 888 | |
|
918 | 889 | |
|
919 | 890 | # --- Links |
|
920 | 891 | |
|
921 | 892 | if hasattr(os, 'link'): |
|
922 | 893 | def link(self, newpath): |
|
923 | 894 | """ Create a hard link at 'newpath', pointing to this file. """ |
|
924 | 895 | os.link(self, newpath) |
|
925 | 896 | |
|
926 | 897 | if hasattr(os, 'symlink'): |
|
927 | 898 | def symlink(self, newlink): |
|
928 | 899 | """ Create a symbolic link at 'newlink', pointing here. """ |
|
929 | 900 | os.symlink(self, newlink) |
|
930 | 901 | |
|
931 | 902 | if hasattr(os, 'readlink'): |
|
932 | 903 | def readlink(self): |
|
933 | 904 | """ Return the path to which this symbolic link points. |
|
934 | 905 | |
|
935 | 906 | The result may be an absolute or a relative path. |
|
936 | 907 | """ |
|
937 | 908 | return self.__class__(os.readlink(self)) |
|
938 | 909 | |
|
939 | 910 | def readlinkabs(self): |
|
940 | 911 | """ Return the path to which this symbolic link points. |
|
941 | 912 | |
|
942 | 913 | The result is always an absolute path. |
|
943 | 914 | """ |
|
944 | 915 | p = self.readlink() |
|
945 | 916 | if p.isabs(): |
|
946 | 917 | return p |
|
947 | 918 | else: |
|
948 | 919 | return (self.parent / p).abspath() |
|
949 | 920 | |
|
950 | 921 | |
|
951 | 922 | # --- High-level functions from shutil |
|
952 | 923 | |
|
953 | 924 | copyfile = shutil.copyfile |
|
954 | 925 | copymode = shutil.copymode |
|
955 | 926 | copystat = shutil.copystat |
|
956 | 927 | copy = shutil.copy |
|
957 | 928 | copy2 = shutil.copy2 |
|
958 | 929 | copytree = shutil.copytree |
|
959 | 930 | if hasattr(shutil, 'move'): |
|
960 | 931 | move = shutil.move |
|
961 | 932 | rmtree = shutil.rmtree |
|
962 | 933 | |
|
963 | 934 | |
|
964 | 935 | # --- Special stuff from os |
|
965 | 936 | |
|
966 | 937 | if hasattr(os, 'chroot'): |
|
967 | 938 | def chroot(self): |
|
968 | 939 | os.chroot(self) |
|
969 | 940 | |
|
970 | 941 | if hasattr(os, 'startfile'): |
|
971 | 942 | def startfile(self): |
|
972 | 943 | os.startfile(self) |
|
973 | 944 |
@@ -1,705 +1,690 b'' | |||
|
1 | 1 | # -*- coding: utf-8 -*- |
|
2 | 2 | """ |
|
3 | 3 | pretty |
|
4 | 4 | ~~ |
|
5 | 5 | |
|
6 | 6 | Python advanced pretty printer. This pretty printer is intended to |
|
7 | 7 | replace the old `pprint` python module which does not allow developers |
|
8 | 8 | to provide their own pretty print callbacks. |
|
9 | 9 | |
|
10 | 10 | This module is based on ruby's `prettyprint.rb` library by `Tanaka Akira`. |
|
11 | 11 | |
|
12 | 12 | |
|
13 | 13 | Example Usage |
|
14 | 14 | ============= |
|
15 | 15 | |
|
16 | 16 | To directly print the representation of an object use `pprint`:: |
|
17 | 17 | |
|
18 | 18 | from pretty import pprint |
|
19 | 19 | pprint(complex_object) |
|
20 | 20 | |
|
21 | 21 | To get a string of the output use `pretty`:: |
|
22 | 22 | |
|
23 | 23 | from pretty import pretty |
|
24 | 24 | string = pretty(complex_object) |
|
25 | 25 | |
|
26 | 26 | |
|
27 | 27 | Extending |
|
28 | 28 | ========= |
|
29 | 29 | |
|
30 | 30 | The pretty library allows developers to add pretty printing rules for their |
|
31 | 31 | own objects. This process is straightforward. All you have to do is to |
|
32 | 32 | add a `__pretty__` method to your object and call the methods on the |
|
33 | 33 | pretty printer passed:: |
|
34 | 34 | |
|
35 | 35 | class MyObject(object): |
|
36 | 36 | |
|
37 | 37 | def __pretty__(self, p, cycle): |
|
38 | 38 | ... |
|
39 | 39 | |
|
40 | 40 | Depending on the python version you want to support you have two |
|
41 | 41 | possibilities. The following list shows the python 2.5 version and the |
|
42 | 42 | compatibility one. |
|
43 | 43 | |
|
44 | 44 | |
|
45 | 45 | Here the example implementation of a `__pretty__` method for a list |
|
46 | 46 | subclass for python 2.5 and higher (python 2.5 requires the with statement |
|
47 | 47 | __future__ import):: |
|
48 | 48 | |
|
49 | 49 | class MyList(list): |
|
50 | 50 | |
|
51 | 51 | def __pretty__(self, p, cycle): |
|
52 | 52 | if cycle: |
|
53 | 53 | p.text('MyList(...)') |
|
54 | 54 | else: |
|
55 | 55 | with p.group(8, 'MyList([', '])'): |
|
56 | 56 | for idx, item in enumerate(self): |
|
57 | 57 | if idx: |
|
58 | 58 | p.text(',') |
|
59 | 59 | p.breakable() |
|
60 | 60 | p.pretty(item) |
|
61 | 61 | |
|
62 | 62 | The `cycle` parameter is `True` if pretty detected a cycle. You *have* to |
|
63 | 63 | react to that or the result is an infinite loop. `p.text()` just adds |
|
64 | 64 | non breaking text to the output, `p.breakable()` either adds a whitespace |
|
65 | 65 | or breaks here. If you pass it an argument it's used instead of the |
|
66 | 66 | default space. `p.pretty` prettyprints another object using the pretty print |
|
67 | 67 | method. |
|
68 | 68 | |
|
69 | 69 | The first parameter to the `group` function specifies the extra indentation |
|
70 | 70 | of the next line. In this example the next item will either be not |
|
71 | 71 | breaked (if the items are short enough) or aligned with the right edge of |
|
72 | 72 | the opening bracked of `MyList`. |
|
73 | 73 | |
|
74 | 74 | If you want to support python 2.4 and lower you can use this code:: |
|
75 | 75 | |
|
76 | 76 | class MyList(list): |
|
77 | 77 | |
|
78 | 78 | def __pretty__(self, p, cycle): |
|
79 | 79 | if cycle: |
|
80 | 80 | p.text('MyList(...)') |
|
81 | 81 | else: |
|
82 | 82 | p.begin_group(8, 'MyList([') |
|
83 | 83 | for idx, item in enumerate(self): |
|
84 | 84 | if idx: |
|
85 | 85 | p.text(',') |
|
86 | 86 | p.breakable() |
|
87 | 87 | p.pretty(item) |
|
88 | 88 | p.end_group(8, '])') |
|
89 | 89 | |
|
90 | 90 | If you just want to indent something you can use the group function |
|
91 | 91 | without open / close parameters. Under python 2.5 you can also use this |
|
92 | 92 | code:: |
|
93 | 93 | |
|
94 | 94 | with p.indent(2): |
|
95 | 95 | ... |
|
96 | 96 | |
|
97 | 97 | Or under python2.4 you might want to modify ``p.indentation`` by hand but |
|
98 | 98 | this is rather ugly. |
|
99 | 99 | |
|
100 | 100 | :copyright: 2007 by Armin Ronacher. |
|
101 | 101 | Portions (c) 2009 by Robert Kern. |
|
102 | 102 | :license: BSD License. |
|
103 | 103 | """ |
|
104 | import __future__ | |
|
104 | from __future__ import with_statement | |
|
105 | from contextlib import contextmanager | |
|
105 | 106 | import sys |
|
106 | 107 | import types |
|
107 | 108 | import re |
|
108 | 109 | import datetime |
|
109 | 110 | from StringIO import StringIO |
|
110 | 111 | from collections import deque |
|
111 | 112 | |
|
112 | 113 | |
|
113 | 114 | __all__ = ['pretty', 'pprint', 'PrettyPrinter', 'RepresentationPrinter', |
|
114 | 115 | 'for_type', 'for_type_by_name'] |
|
115 | 116 | |
|
116 | 117 | |
|
117 | 118 | _re_pattern_type = type(re.compile('')) |
|
118 | 119 | |
|
119 | 120 | |
|
120 | 121 | def pretty(obj, verbose=False, max_width=79, newline='\n'): |
|
121 | 122 | """ |
|
122 | 123 | Pretty print the object's representation. |
|
123 | 124 | """ |
|
124 | 125 | stream = StringIO() |
|
125 | 126 | printer = RepresentationPrinter(stream, verbose, max_width, newline) |
|
126 | 127 | printer.pretty(obj) |
|
127 | 128 | printer.flush() |
|
128 | 129 | return stream.getvalue() |
|
129 | 130 | |
|
130 | 131 | |
|
131 | 132 | def pprint(obj, verbose=False, max_width=79, newline='\n'): |
|
132 | 133 | """ |
|
133 | 134 | Like `pretty` but print to stdout. |
|
134 | 135 | """ |
|
135 | 136 | printer = RepresentationPrinter(sys.stdout, verbose, max_width, newline) |
|
136 | 137 | printer.pretty(obj) |
|
137 | 138 | printer.flush() |
|
138 | 139 | sys.stdout.write(newline) |
|
139 | 140 | sys.stdout.flush() |
|
140 | 141 | |
|
141 | ||
|
142 | # add python2.5 context managers if we have the with statement feature | |
|
143 | if hasattr(__future__, 'with_statement'): exec ''' | |
|
144 | from __future__ import with_statement | |
|
145 | from contextlib import contextmanager | |
|
146 | ||
|
147 | 142 | class _PrettyPrinterBase(object): |
|
148 | 143 | |
|
149 | 144 | @contextmanager |
|
150 | 145 | def indent(self, indent): |
|
151 | 146 | """with statement support for indenting/dedenting.""" |
|
152 | 147 | self.indentation += indent |
|
153 | 148 | try: |
|
154 | 149 | yield |
|
155 | 150 | finally: |
|
156 | 151 | self.indentation -= indent |
|
157 | 152 | |
|
158 | 153 | @contextmanager |
|
159 | 154 | def group(self, indent=0, open='', close=''): |
|
160 | 155 | """like begin_group / end_group but for the with statement.""" |
|
161 | 156 | self.begin_group(indent, open) |
|
162 | 157 | try: |
|
163 | 158 | with self.indent(indent): |
|
164 | 159 | yield |
|
165 | 160 | finally: |
|
166 | 161 | self.end_group(indent, close) |
|
167 | ''' | |
|
168 | else: | |
|
169 | class _PrettyPrinterBase(object): | |
|
170 | ||
|
171 | def _unsupported(self, *a, **kw): | |
|
172 | """unsupported operation""" | |
|
173 | raise RuntimeError('not available in this python version') | |
|
174 | group = indent = _unsupported | |
|
175 | del _unsupported | |
|
176 | ||
|
177 | 162 | |
|
178 | 163 | class PrettyPrinter(_PrettyPrinterBase): |
|
179 | 164 | """ |
|
180 | 165 | Baseclass for the `RepresentationPrinter` prettyprinter that is used to |
|
181 | 166 | generate pretty reprs of objects. Contrary to the `RepresentationPrinter` |
|
182 | 167 | this printer knows nothing about the default pprinters or the `__pretty__` |
|
183 | 168 | callback method. |
|
184 | 169 | """ |
|
185 | 170 | |
|
186 | 171 | def __init__(self, output, max_width=79, newline='\n'): |
|
187 | 172 | self.output = output |
|
188 | 173 | self.max_width = max_width |
|
189 | 174 | self.newline = newline |
|
190 | 175 | self.output_width = 0 |
|
191 | 176 | self.buffer_width = 0 |
|
192 | 177 | self.buffer = deque() |
|
193 | 178 | |
|
194 | 179 | root_group = Group(0) |
|
195 | 180 | self.group_stack = [root_group] |
|
196 | 181 | self.group_queue = GroupQueue(root_group) |
|
197 | 182 | self.indentation = 0 |
|
198 | 183 | |
|
199 | 184 | def _break_outer_groups(self): |
|
200 | 185 | while self.max_width < self.output_width + self.buffer_width: |
|
201 | 186 | group = self.group_queue.deq() |
|
202 | 187 | if not group: |
|
203 | 188 | return |
|
204 | 189 | while group.breakables: |
|
205 | 190 | x = self.buffer.popleft() |
|
206 | 191 | self.output_width = x.output(self.output, self.output_width) |
|
207 | 192 | self.buffer_width -= x.width |
|
208 | 193 | while self.buffer and isinstance(self.buffer[0], Text): |
|
209 | 194 | x = self.buffer.popleft() |
|
210 | 195 | self.output_width = x.output(self.output, self.output_width) |
|
211 | 196 | self.buffer_width -= x.width |
|
212 | 197 | |
|
213 | 198 | def text(self, obj): |
|
214 | 199 | """Add literal text to the output.""" |
|
215 | 200 | width = len(obj) |
|
216 | 201 | if self.buffer: |
|
217 | 202 | text = self.buffer[-1] |
|
218 | 203 | if not isinstance(text, Text): |
|
219 | 204 | text = Text() |
|
220 | 205 | self.buffer.append(text) |
|
221 | 206 | text.add(obj, width) |
|
222 | 207 | self.buffer_width += width |
|
223 | 208 | self._break_outer_groups() |
|
224 | 209 | else: |
|
225 | 210 | self.output.write(obj) |
|
226 | 211 | self.output_width += width |
|
227 | 212 | |
|
228 | 213 | def breakable(self, sep=' '): |
|
229 | 214 | """ |
|
230 | 215 | Add a breakable separator to the output. This does not mean that it |
|
231 | 216 | will automatically break here. If no breaking at this position takes

232 | 217 | place, the `sep` is inserted, which defaults to one space.
|
233 | 218 | """ |
|
234 | 219 | width = len(sep) |
|
235 | 220 | group = self.group_stack[-1] |
|
236 | 221 | if group.want_break: |
|
237 | 222 | self.flush() |
|
238 | 223 | self.output.write(self.newline) |
|
239 | 224 | self.output.write(' ' * self.indentation) |
|
240 | 225 | self.output_width = self.indentation |
|
241 | 226 | self.buffer_width = 0 |
|
242 | 227 | else: |
|
243 | 228 | self.buffer.append(Breakable(sep, width, self)) |
|
244 | 229 | self.buffer_width += width |
|
245 | 230 | self._break_outer_groups() |
|
246 | 231 | |
|
247 | 232 | |
|
248 | 233 | def begin_group(self, indent=0, open=''): |
|
249 | 234 | """ |
|
250 | 235 | Begin a group. If you want support for python < 2.5, which doesn't have

251 | 236 | the with statement, this is the preferred way:
|
252 | 237 | |
|
253 | 238 | p.begin_group(1, '{') |
|
254 | 239 | ... |
|
255 | 240 | p.end_group(1, '}') |
|
256 | 241 | |
|
257 | 242 | The python 2.5 expression would be this: |
|
258 | 243 | |
|
259 | 244 | with p.group(1, '{', '}'): |
|
260 | 245 | ... |
|
261 | 246 | |
|
262 | 247 | The first parameter specifies the indentation for the next line (usually |
|
263 | 248 | the width of the opening text), the second the opening text. All |
|
264 | 249 | parameters are optional. |
|
265 | 250 | """ |
|
266 | 251 | if open: |
|
267 | 252 | self.text(open) |
|
268 | 253 | group = Group(self.group_stack[-1].depth + 1) |
|
269 | 254 | self.group_stack.append(group) |
|
270 | 255 | self.group_queue.enq(group) |
|
271 | 256 | self.indentation += indent |
|
272 | 257 | |
|
273 | 258 | def end_group(self, dedent=0, close=''): |
|
274 | 259 | """End a group. See `begin_group` for more details.""" |
|
275 | 260 | self.indentation -= dedent |
|
276 | 261 | group = self.group_stack.pop() |
|
277 | 262 | if not group.breakables: |
|
278 | 263 | self.group_queue.remove(group) |
|
279 | 264 | if close: |
|
280 | 265 | self.text(close) |
|
281 | 266 | |
|
282 | 267 | def flush(self): |
|
283 | 268 | """Flush data that is left in the buffer.""" |
|
284 | 269 | for data in self.buffer: |
|
285 | 270 | self.output_width += data.output(self.output, self.output_width) |
|
286 | 271 | self.buffer.clear() |
|
287 | 272 | self.buffer_width = 0 |
|
288 | 273 | |
|
289 | 274 | |
|
290 | 275 | def _get_mro(obj_class): |
|
291 | 276 | """ Get a reasonable method resolution order of a class and its superclasses |
|
292 | 277 | for both old-style and new-style classes. |
|
293 | 278 | """ |
|
294 | 279 | if not hasattr(obj_class, '__mro__'): |
|
295 | 280 | # Old-style class. Mix in object to make a fake new-style class. |
|
296 | 281 | try: |
|
297 | 282 | obj_class = type(obj_class.__name__, (obj_class, object), {}) |
|
298 | 283 | except TypeError: |
|
299 | 284 | # Old-style extension type that does not descend from object. |
|
300 | 285 | # FIXME: try to construct a more thorough MRO. |
|
301 | 286 | mro = [obj_class] |
|
302 | 287 | else: |
|
303 | 288 | mro = obj_class.__mro__[1:-1] |
|
304 | 289 | else: |
|
305 | 290 | mro = obj_class.__mro__ |
|
306 | 291 | return mro |
|
307 | 292 | |
|
308 | 293 | |
|
309 | 294 | class RepresentationPrinter(PrettyPrinter): |
|
310 | 295 | """ |
|
311 | 296 | Special pretty printer that has a `pretty` method that calls the pretty |
|
312 | 297 | printer for a python object. |
|
313 | 298 | |
|
314 | 299 | This class stores processing data on `self` so you must *never* use |
|
315 | 300 | this class in a threaded environment. Always lock it or reinstanciate |
|
316 | 301 | it. |
|
317 | 302 | |
|
318 | 303 | Instances also have a verbose flag callbacks can access to control their |
|
319 | 304 | output. For example the default instance repr prints all attributes and |
|
320 | 305 | methods that are not prefixed by an underscore if the printer is in |
|
321 | 306 | verbose mode. |
|
322 | 307 | """ |
|
323 | 308 | |
|
324 | 309 | def __init__(self, output, verbose=False, max_width=79, newline='\n'): |
|
325 | 310 | PrettyPrinter.__init__(self, output, max_width, newline) |
|
326 | 311 | self.verbose = verbose |
|
327 | 312 | self.stack = [] |
|
328 | 313 | |
|
329 | 314 | def pretty(self, obj): |
|
330 | 315 | """Pretty print the given object.""" |
|
331 | 316 | obj_id = id(obj) |
|
332 | 317 | cycle = obj_id in self.stack |
|
333 | 318 | self.stack.append(obj_id) |
|
334 | 319 | self.begin_group() |
|
335 | 320 | try: |
|
336 | 321 | obj_class = getattr(obj, '__class__', None) or type(obj) |
|
337 | 322 | if hasattr(obj_class, '__pretty__'): |
|
338 | 323 | return obj_class.__pretty__(obj, self, cycle) |
|
339 | 324 | try: |
|
340 | 325 | printer = _singleton_pprinters[obj_id] |
|
341 | 326 | except (TypeError, KeyError): |
|
342 | 327 | pass |
|
343 | 328 | else: |
|
344 | 329 | return printer(obj, self, cycle) |
|
345 | 330 | for cls in _get_mro(obj_class): |
|
346 | 331 | if cls in _type_pprinters: |
|
347 | 332 | return _type_pprinters[cls](obj, self, cycle) |
|
348 | 333 | else: |
|
349 | 334 | printer = self._in_deferred_types(cls) |
|
350 | 335 | if printer is not None: |
|
351 | 336 | return printer(obj, self, cycle) |
|
352 | 337 | return _default_pprint(obj, self, cycle) |
|
353 | 338 | finally: |
|
354 | 339 | self.end_group() |
|
355 | 340 | self.stack.pop() |
|
356 | 341 | |
|
357 | 342 | def _in_deferred_types(self, cls): |
|
358 | 343 | """ |
|
359 | 344 | Check if the given class is specified in the deferred type registry. |
|
360 | 345 | |
|
361 | 346 | Returns the printer from the registry if it exists, and None if the |
|
362 | 347 | class is not in the registry. Successful matches will be moved to the |
|
363 | 348 | regular type registry for future use. |
|
364 | 349 | """ |
|
365 | 350 | mod = getattr(cls, '__module__', None) |
|
366 | 351 | name = getattr(cls, '__name__', None) |
|
367 | 352 | key = (mod, name) |
|
368 | 353 | printer = None |
|
369 | 354 | if key in _deferred_type_pprinters: |
|
370 | 355 | # Move the printer over to the regular registry. |
|
371 | 356 | printer = _deferred_type_pprinters.pop(key) |
|
372 | 357 | _type_pprinters[cls] = printer |
|
373 | 358 | return printer |
|
374 | 359 | |
|
375 | 360 | |
|
376 | 361 | |
|
377 | 362 | class Printable(object): |
|
378 | 363 | |
|
379 | 364 | def output(self, stream, output_width): |
|
380 | 365 | return output_width |
|
381 | 366 | |
|
382 | 367 | |
|
383 | 368 | class Text(Printable): |
|
384 | 369 | |
|
385 | 370 | def __init__(self): |
|
386 | 371 | self.objs = [] |
|
387 | 372 | self.width = 0 |
|
388 | 373 | |
|
389 | 374 | def output(self, stream, output_width): |
|
390 | 375 | for obj in self.objs: |
|
391 | 376 | stream.write(obj) |
|
392 | 377 | return output_width + self.width |
|
393 | 378 | |
|
394 | 379 | def add(self, obj, width): |
|
395 | 380 | self.objs.append(obj) |
|
396 | 381 | self.width += width |
|
397 | 382 | |
|
398 | 383 | |
|
399 | 384 | class Breakable(Printable): |
|
400 | 385 | |
|
401 | 386 | def __init__(self, seq, width, pretty): |
|
402 | 387 | self.obj = seq |
|
403 | 388 | self.width = width |
|
404 | 389 | self.pretty = pretty |
|
405 | 390 | self.indentation = pretty.indentation |
|
406 | 391 | self.group = pretty.group_stack[-1] |
|
407 | 392 | self.group.breakables.append(self) |
|
408 | 393 | |
|
409 | 394 | def output(self, stream, output_width): |
|
410 | 395 | self.group.breakables.popleft() |
|
411 | 396 | if self.group.want_break: |
|
412 | 397 | stream.write(self.pretty.newline) |
|
413 | 398 | stream.write(' ' * self.indentation) |
|
414 | 399 | return self.indentation |
|
415 | 400 | if not self.group.breakables: |
|
416 | 401 | self.pretty.group_queue.remove(self.group) |
|
417 | 402 | stream.write(self.obj) |
|
418 | 403 | return output_width + self.width |
|
419 | 404 | |
|
420 | 405 | |
|
421 | 406 | class Group(Printable): |
|
422 | 407 | |
|
423 | 408 | def __init__(self, depth): |
|
424 | 409 | self.depth = depth |
|
425 | 410 | self.breakables = deque() |
|
426 | 411 | self.want_break = False |
|
427 | 412 | |
|
428 | 413 | |
|
429 | 414 | class GroupQueue(object): |
|
430 | 415 | |
|
431 | 416 | def __init__(self, *groups): |
|
432 | 417 | self.queue = [] |
|
433 | 418 | for group in groups: |
|
434 | 419 | self.enq(group) |
|
435 | 420 | |
|
436 | 421 | def enq(self, group): |
|
437 | 422 | depth = group.depth |
|
438 | 423 | while depth > len(self.queue) - 1: |
|
439 | 424 | self.queue.append([]) |
|
440 | 425 | self.queue[depth].append(group) |
|
441 | 426 | |
|
442 | 427 | def deq(self): |
|
443 | 428 | for stack in self.queue: |
|
444 | 429 | for idx, group in enumerate(reversed(stack)): |
|
445 | 430 | if group.breakables: |
|
446 | 431 | del stack[idx] |
|
447 | 432 | group.want_break = True |
|
448 | 433 | return group |
|
449 | 434 | for group in stack: |
|
450 | 435 | group.want_break = True |
|
451 | 436 | del stack[:] |
|
452 | 437 | |
|
453 | 438 | def remove(self, group): |
|
454 | 439 | try: |
|
455 | 440 | self.queue[group.depth].remove(group) |
|
456 | 441 | except ValueError: |
|
457 | 442 | pass |
|
458 | 443 | |
|
459 | 444 | |
|
460 | 445 | _baseclass_reprs = (object.__repr__, types.InstanceType.__repr__) |
|
461 | 446 | |
|
462 | 447 | |
|
463 | 448 | def _default_pprint(obj, p, cycle): |
|
464 | 449 | """ |
|
465 | 450 | The default print function. Used if an object does not provide one and |
|
466 | 451 | it's none of the builtin objects. |
|
467 | 452 | """ |
|
468 | 453 | klass = getattr(obj, '__class__', None) or type(obj) |
|
469 | 454 | if getattr(klass, '__repr__', None) not in _baseclass_reprs: |
|
470 | 455 | # A user-provided repr. |
|
471 | 456 | p.text(repr(obj)) |
|
472 | 457 | return |
|
473 | 458 | p.begin_group(1, '<') |
|
474 | 459 | p.pretty(klass) |
|
475 | 460 | p.text(' at 0x%x' % id(obj)) |
|
476 | 461 | if cycle: |
|
477 | 462 | p.text(' ...') |
|
478 | 463 | elif p.verbose: |
|
479 | 464 | first = True |
|
480 | 465 | for key in dir(obj): |
|
481 | 466 | if not key.startswith('_'): |
|
482 | 467 | try: |
|
483 | 468 | value = getattr(obj, key) |
|
484 | 469 | except AttributeError: |
|
485 | 470 | continue |
|
486 | 471 | if isinstance(value, types.MethodType): |
|
487 | 472 | continue |
|
488 | 473 | if not first: |
|
489 | 474 | p.text(',') |
|
490 | 475 | p.breakable() |
|
491 | 476 | p.text(key) |
|
492 | 477 | p.text('=') |
|
493 | 478 | step = len(key) + 1 |
|
494 | 479 | p.indentation += step |
|
495 | 480 | p.pretty(value) |
|
496 | 481 | p.indentation -= step |
|
497 | 482 | first = False |
|
498 | 483 | p.end_group(1, '>') |
|
499 | 484 | |
|
500 | 485 | |
|
501 | 486 | def _seq_pprinter_factory(start, end): |
|
502 | 487 | """ |
|
503 | 488 | Factory that returns a pprint function useful for sequences. Used by |
|
504 | 489 | the default pprint for tuples, dicts, lists, sets and frozensets. |
|
505 | 490 | """ |
|
506 | 491 | def inner(obj, p, cycle): |
|
507 | 492 | if cycle: |
|
508 | 493 | return p.text(start + '...' + end) |
|
509 | 494 | step = len(start) |
|
510 | 495 | p.begin_group(step, start) |
|
511 | 496 | for idx, x in enumerate(obj): |
|
512 | 497 | if idx: |
|
513 | 498 | p.text(',') |
|
514 | 499 | p.breakable() |
|
515 | 500 | p.pretty(x) |
|
516 | 501 | if len(obj) == 1 and type(obj) is tuple: |
|
517 | 502 | # Special case for 1-item tuples. |
|
518 | 503 | p.text(',') |
|
519 | 504 | p.end_group(step, end) |
|
520 | 505 | return inner |
|
521 | 506 | |
|
522 | 507 | |
|
523 | 508 | def _dict_pprinter_factory(start, end): |
|
524 | 509 | """ |
|
525 | 510 | Factory that returns a pprint function used by the default pprint of |
|
526 | 511 | dicts and dict proxies. |
|
527 | 512 | """ |
|
528 | 513 | def inner(obj, p, cycle): |
|
529 | 514 | if cycle: |
|
530 | 515 | return p.text('{...}') |
|
531 | 516 | p.begin_group(1, start) |
|
532 | 517 | keys = obj.keys() |
|
533 | 518 | try: |
|
534 | 519 | keys.sort() |
|
535 | 520 | except Exception, e: |
|
536 | 521 | # Sometimes the keys don't sort. |
|
537 | 522 | pass |
|
538 | 523 | for idx, key in enumerate(keys): |
|
539 | 524 | if idx: |
|
540 | 525 | p.text(',') |
|
541 | 526 | p.breakable() |
|
542 | 527 | p.pretty(key) |
|
543 | 528 | p.text(': ') |
|
544 | 529 | p.pretty(obj[key]) |
|
545 | 530 | p.end_group(1, end) |
|
546 | 531 | return inner |
|
547 | 532 | |
|
548 | 533 | |
|
549 | 534 | def _super_pprint(obj, p, cycle): |
|
550 | 535 | """The pprint for the super type.""" |
|
551 | 536 | p.begin_group(8, '<super: ') |
|
552 | 537 | p.pretty(obj.__self_class__) |
|
553 | 538 | p.text(',') |
|
554 | 539 | p.breakable() |
|
555 | 540 | p.pretty(obj.__self__) |
|
556 | 541 | p.end_group(8, '>') |
|
557 | 542 | |
|
558 | 543 | |
|
559 | 544 | def _re_pattern_pprint(obj, p, cycle): |
|
560 | 545 | """The pprint function for regular expression patterns.""" |
|
561 | 546 | p.text('re.compile(') |
|
562 | 547 | pattern = repr(obj.pattern) |
|
563 | 548 | if pattern[:1] in 'uU': |
|
564 | 549 | pattern = pattern[1:] |
|
565 | 550 | prefix = 'ur' |
|
566 | 551 | else: |
|
567 | 552 | prefix = 'r' |
|
568 | 553 | pattern = prefix + pattern.replace('\\\\', '\\') |
|
569 | 554 | p.text(pattern) |
|
570 | 555 | if obj.flags: |
|
571 | 556 | p.text(',') |
|
572 | 557 | p.breakable() |
|
573 | 558 | done_one = False |
|
574 | 559 | for flag in ('TEMPLATE', 'IGNORECASE', 'LOCALE', 'MULTILINE', 'DOTALL', |
|
575 | 560 | 'UNICODE', 'VERBOSE', 'DEBUG'): |
|
576 | 561 | if obj.flags & getattr(re, flag): |
|
577 | 562 | if done_one: |
|
578 | 563 | p.text('|') |
|
579 | 564 | p.text('re.' + flag) |
|
580 | 565 | done_one = True |
|
581 | 566 | p.text(')') |
|
582 | 567 | |
|
583 | 568 | |
|
584 | 569 | def _type_pprint(obj, p, cycle): |
|
585 | 570 | """The pprint for classes and types.""" |
|
586 | 571 | if obj.__module__ in ('__builtin__', 'exceptions'): |
|
587 | 572 | name = obj.__name__ |
|
588 | 573 | else: |
|
589 | 574 | name = obj.__module__ + '.' + obj.__name__ |
|
590 | 575 | p.text(name) |
|
591 | 576 | |
|
592 | 577 | |
|
593 | 578 | def _repr_pprint(obj, p, cycle): |
|
594 | 579 | """A pprint that just redirects to the normal repr function.""" |
|
595 | 580 | p.text(repr(obj)) |
|
596 | 581 | |
|
597 | 582 | |
|
598 | 583 | def _function_pprint(obj, p, cycle): |
|
599 | 584 | """Base pprint for all functions and builtin functions.""" |
|
600 | 585 | if obj.__module__ in ('__builtin__', 'exceptions') or not obj.__module__: |
|
601 | 586 | name = obj.__name__ |
|
602 | 587 | else: |
|
603 | 588 | name = obj.__module__ + '.' + obj.__name__ |
|
604 | 589 | p.text('<function %s>' % name) |
|
605 | 590 | |
|
606 | 591 | |
|
607 | 592 | def _exception_pprint(obj, p, cycle): |
|
608 | 593 | """Base pprint for all exceptions.""" |
|
609 | 594 | if obj.__class__.__module__ == 'exceptions': |
|
610 | 595 | name = obj.__class__.__name__ |
|
611 | 596 | else: |
|
612 | 597 | name = '%s.%s' % ( |
|
613 | 598 | obj.__class__.__module__, |
|
614 | 599 | obj.__class__.__name__ |
|
615 | 600 | ) |
|
616 | 601 | step = len(name) + 1 |
|
617 | 602 | p.begin_group(step, '(') |
|
618 | 603 | for idx, arg in enumerate(getattr(obj, 'args', ())): |
|
619 | 604 | if idx: |
|
620 | 605 | p.text(',') |
|
621 | 606 | p.breakable() |
|
622 | 607 | p.pretty(arg) |
|
623 | 608 | p.end_group(step, ')') |
|
624 | 609 | |
|
625 | 610 | |
|
626 | 611 | #: the exception base |
|
627 | 612 | try: |
|
628 | 613 | _exception_base = BaseException |
|
629 | 614 | except NameError: |
|
630 | 615 | _exception_base = Exception |
|
631 | 616 | |
|
632 | 617 | |
|
633 | 618 | #: printers for builtin types |
|
634 | 619 | _type_pprinters = { |
|
635 | 620 | int: _repr_pprint, |
|
636 | 621 | long: _repr_pprint, |
|
637 | 622 | float: _repr_pprint, |
|
638 | 623 | str: _repr_pprint, |
|
639 | 624 | unicode: _repr_pprint, |
|
640 | 625 | tuple: _seq_pprinter_factory('(', ')'), |
|
641 | 626 | list: _seq_pprinter_factory('[', ']'), |
|
642 | 627 | dict: _dict_pprinter_factory('{', '}'), |
|
643 | 628 | types.DictProxyType: _dict_pprinter_factory('<dictproxy {', '}>'), |
|
644 | 629 | set: _seq_pprinter_factory('set([', '])'), |
|
645 | 630 | frozenset: _seq_pprinter_factory('frozenset([', '])'), |
|
646 | 631 | super: _super_pprint, |
|
647 | 632 | _re_pattern_type: _re_pattern_pprint, |
|
648 | 633 | type: _type_pprint, |
|
649 | 634 | types.ClassType: _type_pprint, |
|
650 | 635 | types.FunctionType: _function_pprint, |
|
651 | 636 | types.BuiltinFunctionType: _function_pprint, |
|
652 | 637 | types.SliceType: _repr_pprint, |
|
653 | 638 | types.MethodType: _repr_pprint, |
|
654 | 639 | xrange: _repr_pprint, |
|
655 | 640 | datetime.datetime: _repr_pprint, |
|
656 | 641 | datetime.timedelta: _repr_pprint, |
|
657 | 642 | _exception_base: _exception_pprint |
|
658 | 643 | } |
|
659 | 644 | |
|
660 | 645 | #: printers for types specified by name |
|
661 | 646 | _deferred_type_pprinters = { |
|
662 | 647 | } |
|
663 | 648 | |
|
664 | 649 | def for_type(typ, func): |
|
665 | 650 | """ |
|
666 | 651 | Add a pretty printer for a given type. |
|
667 | 652 | """ |
|
668 | 653 | oldfunc = _type_pprinters.get(typ, None) |
|
669 | 654 | if func is not None: |
|
670 | 655 | # To support easy restoration of old pprinters, we need to ignore Nones. |
|
671 | 656 | _type_pprinters[typ] = func |
|
672 | 657 | return oldfunc |
|
673 | 658 | |
|
674 | 659 | def for_type_by_name(type_module, type_name, func): |
|
675 | 660 | """ |
|
676 | 661 | Add a pretty printer for a type specified by the module and name of a type |
|
677 | 662 | rather than the type object itself. |
|
678 | 663 | """ |
|
679 | 664 | key = (type_module, type_name) |
|
680 | 665 | oldfunc = _deferred_type_pprinters.get(key, None) |
|
681 | 666 | if func is not None: |
|
682 | 667 | # To support easy restoration of old pprinters, we need to ignore Nones. |
|
683 | 668 | _deferred_type_pprinters[key] = func |
|
684 | 669 | return oldfunc |
|
685 | 670 | |
|
686 | 671 | |
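A minimal sketch of registering a custom printer with for_type(); fractions.Fraction and the formatting shown are only an illustration, not part of this module:

    import fractions

    def _fraction_pprint(obj, p, cycle):
        # Fractions are immutable scalars, so the cycle flag can be ignored.
        p.text('%d/%d' % (obj.numerator, obj.denominator))

    for_type(fractions.Fraction, _fraction_pprint)
    pprint(fractions.Fraction(3, 4))   # prints: 3/4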
|
687 | 672 | #: printers for the default singletons |
|
688 | 673 | _singleton_pprinters = dict.fromkeys(map(id, [None, True, False, Ellipsis, |
|
689 | 674 | NotImplemented]), _repr_pprint) |
|
690 | 675 | |
|
691 | 676 | |
|
692 | 677 | if __name__ == '__main__': |
|
693 | 678 | from random import randrange |
|
694 | 679 | class Foo(object): |
|
695 | 680 | def __init__(self): |
|
696 | 681 | self.foo = 1 |
|
697 | 682 | self.bar = re.compile(r'\s+') |
|
698 | 683 | self.blub = dict.fromkeys(range(30), randrange(1, 40)) |
|
699 | 684 | self.hehe = 23424.234234 |
|
700 | 685 | self.list = ["blub", "blah", self] |
|
701 | 686 | |
|
702 | 687 | def get_foo(self): |
|
703 | 688 | print "foo" |
|
704 | 689 | |
|
705 | 690 | pprint(Foo(), verbose=True) |
|