moved getdefaultencoding from text to py3compat
Brandon Parsons
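The change itself is mechanical: call sites that previously reached for text.getdefaultencoding() or sys.getdefaultencoding() now go through IPython.utils.py3compat.getdefaultencoding(). As a rough orientation only, the helper being centralized behaves approximately like the sketch below; this is an illustrative approximation, not the exact code moved by this commit.

    # Hedged sketch of the assumed py3compat.getdefaultencoding() helper:
    # prefer the encoding of the stdin stream, then fall back to the
    # interpreter default ('ascii' on Python 2, 'utf-8' on Python 3).
    import sys

    def getdefaultencoding():
        enc = getattr(sys.stdin, 'encoding', None)
        return enc or sys.getdefaultencoding()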

@@ -1,702 +1,702 b''
1 """A simple configuration system.
1 """A simple configuration system.
2
2
3 Authors
3 Authors
4 -------
4 -------
5 * Brian Granger
5 * Brian Granger
6 * Fernando Perez
6 * Fernando Perez
7 * Min RK
7 * Min RK
8 """
8 """
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Copyright (C) 2008-2011 The IPython Development Team
11 # Copyright (C) 2008-2011 The IPython Development Team
12 #
12 #
13 # Distributed under the terms of the BSD License. The full license is in
13 # Distributed under the terms of the BSD License. The full license is in
14 # the file COPYING, distributed as part of this software.
14 # the file COPYING, distributed as part of this software.
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16
16
17 #-----------------------------------------------------------------------------
17 #-----------------------------------------------------------------------------
18 # Imports
18 # Imports
19 #-----------------------------------------------------------------------------
19 #-----------------------------------------------------------------------------
20
20
21 import __builtin__ as builtin_mod
21 import __builtin__ as builtin_mod
22 import os
22 import os
23 import re
23 import re
24 import sys
24 import sys
25
25
26 from IPython.external import argparse
26 from IPython.external import argparse
27 from IPython.utils.path import filefind, get_ipython_dir
27 from IPython.utils.path import filefind, get_ipython_dir
28 from IPython.utils import py3compat, text, warn
28 from IPython.utils import py3compat, text, warn
29
29
30 #-----------------------------------------------------------------------------
30 #-----------------------------------------------------------------------------
31 # Exceptions
31 # Exceptions
32 #-----------------------------------------------------------------------------
32 #-----------------------------------------------------------------------------
33
33
34
34
35 class ConfigError(Exception):
35 class ConfigError(Exception):
36 pass
36 pass
37
37
38 class ConfigLoaderError(ConfigError):
38 class ConfigLoaderError(ConfigError):
39 pass
39 pass
40
40
41 class ConfigFileNotFound(ConfigError):
41 class ConfigFileNotFound(ConfigError):
42 pass
42 pass
43
43
44 class ArgumentError(ConfigLoaderError):
44 class ArgumentError(ConfigLoaderError):
45 pass
45 pass
46
46
47 #-----------------------------------------------------------------------------
47 #-----------------------------------------------------------------------------
48 # Argparse fix
48 # Argparse fix
49 #-----------------------------------------------------------------------------
49 #-----------------------------------------------------------------------------
50
50
51 # Unfortunately argparse by default prints help messages to stderr instead of
51 # Unfortunately argparse by default prints help messages to stderr instead of
52 # stdout. This makes it annoying to capture long help screens at the command
52 # stdout. This makes it annoying to capture long help screens at the command
53 # line, since one must know how to pipe stderr, which many users don't know how
53 # line, since one must know how to pipe stderr, which many users don't know how
54 # to do. So we override the print_help method with one that defaults to
54 # to do. So we override the print_help method with one that defaults to
55 # stdout and use our class instead.
55 # stdout and use our class instead.
56
56
57 class ArgumentParser(argparse.ArgumentParser):
57 class ArgumentParser(argparse.ArgumentParser):
58 """Simple argparse subclass that prints help to stdout by default."""
58 """Simple argparse subclass that prints help to stdout by default."""
59
59
60 def print_help(self, file=None):
60 def print_help(self, file=None):
61 if file is None:
61 if file is None:
62 file = sys.stdout
62 file = sys.stdout
63 return super(ArgumentParser, self).print_help(file)
63 return super(ArgumentParser, self).print_help(file)
64
64
65 print_help.__doc__ = argparse.ArgumentParser.print_help.__doc__
65 print_help.__doc__ = argparse.ArgumentParser.print_help.__doc__
66
66
67 #-----------------------------------------------------------------------------
67 #-----------------------------------------------------------------------------
68 # Config class for holding config information
68 # Config class for holding config information
69 #-----------------------------------------------------------------------------
69 #-----------------------------------------------------------------------------
70
70
71
71
72 class Config(dict):
72 class Config(dict):
73 """An attribute based dict that can do smart merges."""
73 """An attribute based dict that can do smart merges."""
74
74
75 def __init__(self, *args, **kwds):
75 def __init__(self, *args, **kwds):
76 dict.__init__(self, *args, **kwds)
76 dict.__init__(self, *args, **kwds)
77 # This sets self.__dict__ = self, but it has to be done this way
77 # This sets self.__dict__ = self, but it has to be done this way
78 # because we are also overriding __setattr__.
78 # because we are also overriding __setattr__.
79 dict.__setattr__(self, '__dict__', self)
79 dict.__setattr__(self, '__dict__', self)
80
80
81 def _merge(self, other):
81 def _merge(self, other):
82 to_update = {}
82 to_update = {}
83 for k, v in other.iteritems():
83 for k, v in other.iteritems():
84 if not self.has_key(k):
84 if not self.has_key(k):
85 to_update[k] = v
85 to_update[k] = v
86 else: # I have this key
86 else: # I have this key
87 if isinstance(v, Config):
87 if isinstance(v, Config):
88 # Recursively merge common sub Configs
88 # Recursively merge common sub Configs
89 self[k]._merge(v)
89 self[k]._merge(v)
90 else:
90 else:
91 # Plain updates for non-Configs
91 # Plain updates for non-Configs
92 to_update[k] = v
92 to_update[k] = v
93
93
94 self.update(to_update)
94 self.update(to_update)
95
95
96 def _is_section_key(self, key):
96 def _is_section_key(self, key):
97 if key[0].upper()==key[0] and not key.startswith('_'):
97 if key[0].upper()==key[0] and not key.startswith('_'):
98 return True
98 return True
99 else:
99 else:
100 return False
100 return False
101
101
102 def __contains__(self, key):
102 def __contains__(self, key):
103 if self._is_section_key(key):
103 if self._is_section_key(key):
104 return True
104 return True
105 else:
105 else:
106 return super(Config, self).__contains__(key)
106 return super(Config, self).__contains__(key)
107 # .has_key is deprecated for dictionaries.
107 # .has_key is deprecated for dictionaries.
108 has_key = __contains__
108 has_key = __contains__
109
109
110 def _has_section(self, key):
110 def _has_section(self, key):
111 if self._is_section_key(key):
111 if self._is_section_key(key):
112 if super(Config, self).__contains__(key):
112 if super(Config, self).__contains__(key):
113 return True
113 return True
114 return False
114 return False
115
115
116 def copy(self):
116 def copy(self):
117 return type(self)(dict.copy(self))
117 return type(self)(dict.copy(self))
118
118
119 def __copy__(self):
119 def __copy__(self):
120 return self.copy()
120 return self.copy()
121
121
122 def __deepcopy__(self, memo):
122 def __deepcopy__(self, memo):
123 import copy
123 import copy
124 return type(self)(copy.deepcopy(self.items()))
124 return type(self)(copy.deepcopy(self.items()))
125
125
126 def __getitem__(self, key):
126 def __getitem__(self, key):
127 # We cannot use directly self._is_section_key, because it triggers
127 # We cannot use directly self._is_section_key, because it triggers
128 # infinite recursion on top of PyPy. Instead, we manually fish the
128 # infinite recursion on top of PyPy. Instead, we manually fish the
129 # bound method.
129 # bound method.
130 is_section_key = self.__class__._is_section_key.__get__(self)
130 is_section_key = self.__class__._is_section_key.__get__(self)
131
131
132 # Because we use this for an exec namespace, we need to delegate
132 # Because we use this for an exec namespace, we need to delegate
133 # the lookup of names in __builtin__ to itself. This means
133 # the lookup of names in __builtin__ to itself. This means
134 # that you can't have section or attribute names that are
134 # that you can't have section or attribute names that are
135 # builtins.
135 # builtins.
136 try:
136 try:
137 return getattr(builtin_mod, key)
137 return getattr(builtin_mod, key)
138 except AttributeError:
138 except AttributeError:
139 pass
139 pass
140 if is_section_key(key):
140 if is_section_key(key):
141 try:
141 try:
142 return dict.__getitem__(self, key)
142 return dict.__getitem__(self, key)
143 except KeyError:
143 except KeyError:
144 c = Config()
144 c = Config()
145 dict.__setitem__(self, key, c)
145 dict.__setitem__(self, key, c)
146 return c
146 return c
147 else:
147 else:
148 return dict.__getitem__(self, key)
148 return dict.__getitem__(self, key)
149
149
150 def __setitem__(self, key, value):
150 def __setitem__(self, key, value):
151 # Don't allow names in __builtin__ to be modified.
151 # Don't allow names in __builtin__ to be modified.
152 if hasattr(builtin_mod, key):
152 if hasattr(builtin_mod, key):
153 raise ConfigError('Config variable names cannot have the same name '
153 raise ConfigError('Config variable names cannot have the same name '
154 'as a Python builtin: %s' % key)
154 'as a Python builtin: %s' % key)
155 if self._is_section_key(key):
155 if self._is_section_key(key):
156 if not isinstance(value, Config):
156 if not isinstance(value, Config):
157 raise ValueError('values whose keys begin with an uppercase '
157 raise ValueError('values whose keys begin with an uppercase '
158 'char must be Config instances: %r, %r' % (key, value))
158 'char must be Config instances: %r, %r' % (key, value))
159 else:
159 else:
160 dict.__setitem__(self, key, value)
160 dict.__setitem__(self, key, value)
161
161
162 def __getattr__(self, key):
162 def __getattr__(self, key):
163 try:
163 try:
164 return self.__getitem__(key)
164 return self.__getitem__(key)
165 except KeyError, e:
165 except KeyError, e:
166 raise AttributeError(e)
166 raise AttributeError(e)
167
167
168 def __setattr__(self, key, value):
168 def __setattr__(self, key, value):
169 try:
169 try:
170 self.__setitem__(key, value)
170 self.__setitem__(key, value)
171 except KeyError, e:
171 except KeyError, e:
172 raise AttributeError(e)
172 raise AttributeError(e)
173
173
174 def __delattr__(self, key):
174 def __delattr__(self, key):
175 try:
175 try:
176 dict.__delitem__(self, key)
176 dict.__delitem__(self, key)
177 except KeyError, e:
177 except KeyError, e:
178 raise AttributeError(e)
178 raise AttributeError(e)
179
179
180
180
181 #-----------------------------------------------------------------------------
181 #-----------------------------------------------------------------------------
182 # Config loading classes
182 # Config loading classes
183 #-----------------------------------------------------------------------------
183 #-----------------------------------------------------------------------------
184
184
185
185
186 class ConfigLoader(object):
186 class ConfigLoader(object):
187 """A object for loading configurations from just about anywhere.
187 """A object for loading configurations from just about anywhere.
188
188
189 The resulting configuration is packaged as a :class:`Struct`.
189 The resulting configuration is packaged as a :class:`Struct`.
190
190
191 Notes
191 Notes
192 -----
192 -----
193 A :class:`ConfigLoader` does one thing: load a config from a source
193 A :class:`ConfigLoader` does one thing: load a config from a source
194 (file, command line arguments) and return the data as a :class:`Struct`.
194 (file, command line arguments) and return the data as a :class:`Struct`.
195 There are lots of things that :class:`ConfigLoader` does not do. It does
195 There are lots of things that :class:`ConfigLoader` does not do. It does
196 not implement complex logic for finding config files. It does not handle
196 not implement complex logic for finding config files. It does not handle
197 default values or merge multiple configs. These things need to be
197 default values or merge multiple configs. These things need to be
198 handled elsewhere.
198 handled elsewhere.
199 """
199 """
200
200
201 def __init__(self):
201 def __init__(self):
202 """A base class for config loaders.
202 """A base class for config loaders.
203
203
204 Examples
204 Examples
205 --------
205 --------
206
206
207 >>> cl = ConfigLoader()
207 >>> cl = ConfigLoader()
208 >>> config = cl.load_config()
208 >>> config = cl.load_config()
209 >>> config
209 >>> config
210 {}
210 {}
211 """
211 """
212 self.clear()
212 self.clear()
213
213
214 def clear(self):
214 def clear(self):
215 self.config = Config()
215 self.config = Config()
216
216
217 def load_config(self):
217 def load_config(self):
218 """Load a config from somewhere, return a :class:`Config` instance.
218 """Load a config from somewhere, return a :class:`Config` instance.
219
219
220 Usually, this will cause self.config to be set and then returned.
220 Usually, this will cause self.config to be set and then returned.
221 However, in most cases, :meth:`ConfigLoader.clear` should be called
221 However, in most cases, :meth:`ConfigLoader.clear` should be called
222 to erase any previous state.
222 to erase any previous state.
223 """
223 """
224 self.clear()
224 self.clear()
225 return self.config
225 return self.config
226
226
227
227
228 class FileConfigLoader(ConfigLoader):
228 class FileConfigLoader(ConfigLoader):
229 """A base class for file based configurations.
229 """A base class for file based configurations.
230
230
231 As we add more file based config loaders, the common logic should go
231 As we add more file based config loaders, the common logic should go
232 here.
232 here.
233 """
233 """
234 pass
234 pass
235
235
236
236
237 class PyFileConfigLoader(FileConfigLoader):
237 class PyFileConfigLoader(FileConfigLoader):
238 """A config loader for pure python files.
238 """A config loader for pure python files.
239
239
240 This calls execfile on a plain python file and looks for attributes
240 This calls execfile on a plain python file and looks for attributes
241 that are all caps. These attributes are added to the config Struct.
241 that are all caps. These attributes are added to the config Struct.
242 """
242 """
243
243
244 def __init__(self, filename, path=None):
244 def __init__(self, filename, path=None):
245 """Build a config loader for a filename and path.
245 """Build a config loader for a filename and path.
246
246
247 Parameters
247 Parameters
248 ----------
248 ----------
249 filename : str
249 filename : str
250 The file name of the config file.
250 The file name of the config file.
251 path : str, list, tuple
251 path : str, list, tuple
252 The path to search for the config file on, or a sequence of
252 The path to search for the config file on, or a sequence of
253 paths to try in order.
253 paths to try in order.
254 """
254 """
255 super(PyFileConfigLoader, self).__init__()
255 super(PyFileConfigLoader, self).__init__()
256 self.filename = filename
256 self.filename = filename
257 self.path = path
257 self.path = path
258 self.full_filename = ''
258 self.full_filename = ''
259 self.data = None
259 self.data = None
260
260
261 def load_config(self):
261 def load_config(self):
262 """Load the config from a file and return it as a Struct."""
262 """Load the config from a file and return it as a Struct."""
263 self.clear()
263 self.clear()
264 try:
264 try:
265 self._find_file()
265 self._find_file()
266 except IOError as e:
266 except IOError as e:
267 raise ConfigFileNotFound(str(e))
267 raise ConfigFileNotFound(str(e))
268 self._read_file_as_dict()
268 self._read_file_as_dict()
269 self._convert_to_config()
269 self._convert_to_config()
270 return self.config
270 return self.config
271
271
272 def _find_file(self):
272 def _find_file(self):
273 """Try to find the file by searching the paths."""
273 """Try to find the file by searching the paths."""
274 self.full_filename = filefind(self.filename, self.path)
274 self.full_filename = filefind(self.filename, self.path)
275
275
276 def _read_file_as_dict(self):
276 def _read_file_as_dict(self):
277 """Load the config file into self.config, with recursive loading."""
277 """Load the config file into self.config, with recursive loading."""
278 # This closure is made available in the namespace that is used
278 # This closure is made available in the namespace that is used
279 # to exec the config file. It allows users to call
279 # to exec the config file. It allows users to call
280 # load_subconfig('myconfig.py') to load config files recursively.
280 # load_subconfig('myconfig.py') to load config files recursively.
281 # It needs to be a closure because it has references to self.path
281 # It needs to be a closure because it has references to self.path
282 # and self.config. The sub-config is loaded with the same path
282 # and self.config. The sub-config is loaded with the same path
283 # as the parent, but it uses an empty config which is then merged
283 # as the parent, but it uses an empty config which is then merged
284 # with the parent's.
284 # with the parent's.
285
285
286 # If a profile is specified, the config file will be loaded
286 # If a profile is specified, the config file will be loaded
287 # from that profile
287 # from that profile
288
288
289 def load_subconfig(fname, profile=None):
289 def load_subconfig(fname, profile=None):
290 # import here to prevent circular imports
290 # import here to prevent circular imports
291 from IPython.core.profiledir import ProfileDir, ProfileDirError
291 from IPython.core.profiledir import ProfileDir, ProfileDirError
292 if profile is not None:
292 if profile is not None:
293 try:
293 try:
294 profile_dir = ProfileDir.find_profile_dir_by_name(
294 profile_dir = ProfileDir.find_profile_dir_by_name(
295 get_ipython_dir(),
295 get_ipython_dir(),
296 profile,
296 profile,
297 )
297 )
298 except ProfileDirError:
298 except ProfileDirError:
299 return
299 return
300 path = profile_dir.location
300 path = profile_dir.location
301 else:
301 else:
302 path = self.path
302 path = self.path
303 loader = PyFileConfigLoader(fname, path)
303 loader = PyFileConfigLoader(fname, path)
304 try:
304 try:
305 sub_config = loader.load_config()
305 sub_config = loader.load_config()
306 except ConfigFileNotFound:
306 except ConfigFileNotFound:
307 # Pass silently if the sub config is not there. This happens
307 # Pass silently if the sub config is not there. This happens
308 # when a user is using a profile, but not the default config.
308 # when a user is using a profile, but not the default config.
309 pass
309 pass
310 else:
310 else:
311 self.config._merge(sub_config)
311 self.config._merge(sub_config)
312
312
313 # Again, this needs to be a closure and should be used in config
313 # Again, this needs to be a closure and should be used in config
314 # files to get the config being loaded.
314 # files to get the config being loaded.
315 def get_config():
315 def get_config():
316 return self.config
316 return self.config
317
317
318 namespace = dict(load_subconfig=load_subconfig, get_config=get_config)
318 namespace = dict(load_subconfig=load_subconfig, get_config=get_config)
319 fs_encoding = sys.getfilesystemencoding() or 'ascii'
319 fs_encoding = sys.getfilesystemencoding() or 'ascii'
320 conf_filename = self.full_filename.encode(fs_encoding)
320 conf_filename = self.full_filename.encode(fs_encoding)
321 py3compat.execfile(conf_filename, namespace)
321 py3compat.execfile(conf_filename, namespace)
322
322
323 def _convert_to_config(self):
323 def _convert_to_config(self):
324 if self.data is None:
324 if self.data is None:
325 raise ConfigLoaderError('self.data does not exist')
325 raise ConfigLoaderError('self.data does not exist')
326
326
327
327
328 class CommandLineConfigLoader(ConfigLoader):
328 class CommandLineConfigLoader(ConfigLoader):
329 """A config loader for command line arguments.
329 """A config loader for command line arguments.
330
330
331 As we add more command line based loaders, the common logic should go
331 As we add more command line based loaders, the common logic should go
332 here.
332 here.
333 """
333 """
334
334
335 def _exec_config_str(self, lhs, rhs):
335 def _exec_config_str(self, lhs, rhs):
336 """execute self.config.<lhs>=<rhs>
336 """execute self.config.<lhs>=<rhs>
337
337
338 * expands ~ with expanduser
338 * expands ~ with expanduser
339 * tries to assign with raw exec, otherwise assigns with just the string,
339 * tries to assign with raw exec, otherwise assigns with just the string,
340 allowing `--C.a=foobar` and `--C.a="foobar"` to be equivalent. *Not*
340 allowing `--C.a=foobar` and `--C.a="foobar"` to be equivalent. *Not*
341 equivalent are `--C.a=4` and `--C.a='4'`.
341 equivalent are `--C.a=4` and `--C.a='4'`.
342 """
342 """
343 rhs = os.path.expanduser(rhs)
343 rhs = os.path.expanduser(rhs)
344 exec_str = 'self.config.' + lhs + '=' + rhs
344 exec_str = 'self.config.' + lhs + '=' + rhs
345 try:
345 try:
346 # Try to see if regular Python syntax will work. This
346 # Try to see if regular Python syntax will work. This
347 # won't handle strings as the quote marks are removed
347 # won't handle strings as the quote marks are removed
348 # by the system shell.
348 # by the system shell.
349 exec exec_str in locals(), globals()
349 exec exec_str in locals(), globals()
350 except (NameError, SyntaxError):
350 except (NameError, SyntaxError):
351 # This case happens if the rhs is a string but without
351 # This case happens if the rhs is a string but without
352 # the quote marks. Fall back to assigning the rhs as a plain
352 # the quote marks. Fall back to assigning the rhs as a plain
353 # (unicode) string, via the local name 'rhs', and see if
353 # (unicode) string, via the local name 'rhs', and see if
354 # it succeeds. If it still fails, we let it raise.
354 # it succeeds. If it still fails, we let it raise.
355 exec_str = u'self.config.' + lhs + '= rhs'
355 exec_str = u'self.config.' + lhs + '= rhs'
356 exec exec_str in locals(), globals()
356 exec exec_str in locals(), globals()
357
357
358 def _load_flag(self, cfg):
358 def _load_flag(self, cfg):
359 """update self.config from a flag, which can be a dict or Config"""
359 """update self.config from a flag, which can be a dict or Config"""
360 if isinstance(cfg, (dict, Config)):
360 if isinstance(cfg, (dict, Config)):
361 # don't clobber whole config sections, update
361 # don't clobber whole config sections, update
362 # each section from config:
362 # each section from config:
363 for sec,c in cfg.iteritems():
363 for sec,c in cfg.iteritems():
364 self.config[sec].update(c)
364 self.config[sec].update(c)
365 else:
365 else:
366 raise TypeError("Invalid flag: %r" % cfg)
366 raise TypeError("Invalid flag: %r" % cfg)
367
367
368 # raw --identifier=value pattern
368 # raw --identifier=value pattern
369 # but *also* accept '-' as wordsep, for aliases
369 # but *also* accept '-' as wordsep, for aliases
370 # accepts: --foo=a
370 # accepts: --foo=a
371 # --Class.trait=value
371 # --Class.trait=value
372 # --alias-name=value
372 # --alias-name=value
373 # rejects: -foo=value
373 # rejects: -foo=value
374 # --foo
374 # --foo
375 # --Class.trait
375 # --Class.trait
376 kv_pattern = re.compile(r'\-\-[A-Za-z][\w\-]*(\.[\w\-]+)*\=.*')
376 kv_pattern = re.compile(r'\-\-[A-Za-z][\w\-]*(\.[\w\-]+)*\=.*')
377
377
378 # just flags, no assignments, with two *or one* leading '-'
378 # just flags, no assignments, with two *or one* leading '-'
379 # accepts: --foo
379 # accepts: --foo
380 # -foo-bar-again
380 # -foo-bar-again
381 # rejects: --anything=anything
381 # rejects: --anything=anything
382 # --two.word
382 # --two.word
383
383
384 flag_pattern = re.compile(r'\-\-?\w+[\-\w]*$')
384 flag_pattern = re.compile(r'\-\-?\w+[\-\w]*$')
385
385
386 class KeyValueConfigLoader(CommandLineConfigLoader):
386 class KeyValueConfigLoader(CommandLineConfigLoader):
387 """A config loader that loads key value pairs from the command line.
387 """A config loader that loads key value pairs from the command line.
388
388
389 This allows command line options to be given in the following form::
389 This allows command line options to be given in the following form::
390
390
391 ipython --profile="foo" --InteractiveShell.autocall=False
391 ipython --profile="foo" --InteractiveShell.autocall=False
392 """
392 """
393
393
394 def __init__(self, argv=None, aliases=None, flags=None):
394 def __init__(self, argv=None, aliases=None, flags=None):
395 """Create a key value pair config loader.
395 """Create a key value pair config loader.
396
396
397 Parameters
397 Parameters
398 ----------
398 ----------
399 argv : list
399 argv : list
400 A list that has the form of sys.argv[1:] which has unicode
400 A list that has the form of sys.argv[1:] which has unicode
401 elements of the form u"key=value". If this is None (default),
401 elements of the form u"key=value". If this is None (default),
402 then sys.argv[1:] will be used.
402 then sys.argv[1:] will be used.
403 aliases : dict
403 aliases : dict
404 A dict of aliases for configurable traits.
404 A dict of aliases for configurable traits.
405 Keys are the short aliases, Values are the resolved trait.
405 Keys are the short aliases, Values are the resolved trait.
406 Of the form: `{'alias' : 'Configurable.trait'}`
406 Of the form: `{'alias' : 'Configurable.trait'}`
407 flags : dict
407 flags : dict
408 A dict of flags, keyed by str name. Values can be Config objects,
408 A dict of flags, keyed by str name. Values can be Config objects,
409 dicts, or "key=value" strings. If Config or dict, when the flag
409 dicts, or "key=value" strings. If Config or dict, when the flag
410 is triggered, the config is loaded as `self.config.update(m)`.
410 is triggered, the config is loaded as `self.config.update(m)`.
411
411
412 Returns
412 Returns
413 -------
413 -------
414 config : Config
414 config : Config
415 The resulting Config object.
415 The resulting Config object.
416
416
417 Examples
417 Examples
418 --------
418 --------
419
419
420 >>> from IPython.config.loader import KeyValueConfigLoader
420 >>> from IPython.config.loader import KeyValueConfigLoader
421 >>> cl = KeyValueConfigLoader()
421 >>> cl = KeyValueConfigLoader()
422 >>> cl.load_config(["--A.name='brian'","--B.number=0"])
422 >>> cl.load_config(["--A.name='brian'","--B.number=0"])
423 {'A': {'name': 'brian'}, 'B': {'number': 0}}
423 {'A': {'name': 'brian'}, 'B': {'number': 0}}
424 """
424 """
425 self.clear()
425 self.clear()
426 if argv is None:
426 if argv is None:
427 argv = sys.argv[1:]
427 argv = sys.argv[1:]
428 self.argv = argv
428 self.argv = argv
429 self.aliases = aliases or {}
429 self.aliases = aliases or {}
430 self.flags = flags or {}
430 self.flags = flags or {}
431
431
432
432
433 def clear(self):
433 def clear(self):
434 super(KeyValueConfigLoader, self).clear()
434 super(KeyValueConfigLoader, self).clear()
435 self.extra_args = []
435 self.extra_args = []
436
436
437
437
438 def _decode_argv(self, argv, enc=None):
438 def _decode_argv(self, argv, enc=None):
439 """decode argv if bytes, using stin.encoding, falling back on default enc"""
439 """decode argv if bytes, using stin.encoding, falling back on default enc"""
440 uargv = []
440 uargv = []
441 if enc is None:
441 if enc is None:
442 enc = text.getdefaultencoding()
442 enc = py3compat.getdefaultencoding()
443 for arg in argv:
443 for arg in argv:
444 if not isinstance(arg, unicode):
444 if not isinstance(arg, unicode):
445 # only decode if not already decoded
445 # only decode if not already decoded
446 arg = arg.decode(enc)
446 arg = arg.decode(enc)
447 uargv.append(arg)
447 uargv.append(arg)
448 return uargv
448 return uargv
449
449
450
450
451 def load_config(self, argv=None, aliases=None, flags=None):
451 def load_config(self, argv=None, aliases=None, flags=None):
452 """Parse the configuration and generate the Config object.
452 """Parse the configuration and generate the Config object.
453
453
454 After loading, any arguments that are not key-value or
454 After loading, any arguments that are not key-value or
455 flags will be stored in self.extra_args - a list of
455 flags will be stored in self.extra_args - a list of
456 unparsed command-line arguments. This is used for
456 unparsed command-line arguments. This is used for
457 arguments such as input files or subcommands.
457 arguments such as input files or subcommands.
458
458
459 Parameters
459 Parameters
460 ----------
460 ----------
461 argv : list, optional
461 argv : list, optional
462 A list that has the form of sys.argv[1:] which has unicode
462 A list that has the form of sys.argv[1:] which has unicode
463 elements of the form u"key=value". If this is None (default),
463 elements of the form u"key=value". If this is None (default),
464 then self.argv will be used.
464 then self.argv will be used.
465 aliases : dict
465 aliases : dict
466 A dict of aliases for configurable traits.
466 A dict of aliases for configurable traits.
467 Keys are the short aliases, Values are the resolved trait.
467 Keys are the short aliases, Values are the resolved trait.
468 Of the form: `{'alias' : 'Configurable.trait'}`
468 Of the form: `{'alias' : 'Configurable.trait'}`
469 flags : dict
469 flags : dict
470 A dict of flags, keyed by str name. Values can be Config objects
470 A dict of flags, keyed by str name. Values can be Config objects
471 or dicts. When the flag is triggered, the config is loaded as
471 or dicts. When the flag is triggered, the config is loaded as
472 `self.config.update(cfg)`.
472 `self.config.update(cfg)`.
473 """
473 """
474 from IPython.config.configurable import Configurable
474 from IPython.config.configurable import Configurable
475
475
476 self.clear()
476 self.clear()
477 if argv is None:
477 if argv is None:
478 argv = self.argv
478 argv = self.argv
479 if aliases is None:
479 if aliases is None:
480 aliases = self.aliases
480 aliases = self.aliases
481 if flags is None:
481 if flags is None:
482 flags = self.flags
482 flags = self.flags
483
483
484 # ensure argv is a list of unicode strings:
484 # ensure argv is a list of unicode strings:
485 uargv = self._decode_argv(argv)
485 uargv = self._decode_argv(argv)
486 for idx,raw in enumerate(uargv):
486 for idx,raw in enumerate(uargv):
487 # strip leading '-'
487 # strip leading '-'
488 item = raw.lstrip('-')
488 item = raw.lstrip('-')
489
489
490 if raw == '--':
490 if raw == '--':
491 # don't parse arguments after '--'
491 # don't parse arguments after '--'
492 # this is useful for relaying arguments to scripts, e.g.
492 # this is useful for relaying arguments to scripts, e.g.
493 # ipython -i foo.py --pylab=qt -- args after '--' go-to-foo.py
493 # ipython -i foo.py --pylab=qt -- args after '--' go-to-foo.py
494 self.extra_args.extend(uargv[idx+1:])
494 self.extra_args.extend(uargv[idx+1:])
495 break
495 break
496
496
497 if kv_pattern.match(raw):
497 if kv_pattern.match(raw):
498 lhs,rhs = item.split('=',1)
498 lhs,rhs = item.split('=',1)
499 # Substitute longnames for aliases.
499 # Substitute longnames for aliases.
500 if lhs in aliases:
500 if lhs in aliases:
501 lhs = aliases[lhs]
501 lhs = aliases[lhs]
502 if '.' not in lhs:
502 if '.' not in lhs:
503 # probably a mistyped alias, but not technically illegal
503 # probably a mistyped alias, but not technically illegal
504 warn.warn("Unrecognized alias: '%s', it will probably have no effect."%lhs)
504 warn.warn("Unrecognized alias: '%s', it will probably have no effect."%lhs)
505 try:
505 try:
506 self._exec_config_str(lhs, rhs)
506 self._exec_config_str(lhs, rhs)
507 except Exception:
507 except Exception:
508 raise ArgumentError("Invalid argument: '%s'" % raw)
508 raise ArgumentError("Invalid argument: '%s'" % raw)
509
509
510 elif flag_pattern.match(raw):
510 elif flag_pattern.match(raw):
511 if item in flags:
511 if item in flags:
512 cfg,help = flags[item]
512 cfg,help = flags[item]
513 self._load_flag(cfg)
513 self._load_flag(cfg)
514 else:
514 else:
515 raise ArgumentError("Unrecognized flag: '%s'"%raw)
515 raise ArgumentError("Unrecognized flag: '%s'"%raw)
516 elif raw.startswith('-'):
516 elif raw.startswith('-'):
517 kv = '--'+item
517 kv = '--'+item
518 if kv_pattern.match(kv):
518 if kv_pattern.match(kv):
519 raise ArgumentError("Invalid argument: '%s', did you mean '%s'?"%(raw, kv))
519 raise ArgumentError("Invalid argument: '%s', did you mean '%s'?"%(raw, kv))
520 else:
520 else:
521 raise ArgumentError("Invalid argument: '%s'"%raw)
521 raise ArgumentError("Invalid argument: '%s'"%raw)
522 else:
522 else:
523 # keep all args that aren't valid in a list,
523 # keep all args that aren't valid in a list,
524 # in case our parent knows what to do with them.
524 # in case our parent knows what to do with them.
525 self.extra_args.append(item)
525 self.extra_args.append(item)
526 return self.config
526 return self.config
527
527
528 class ArgParseConfigLoader(CommandLineConfigLoader):
528 class ArgParseConfigLoader(CommandLineConfigLoader):
529 """A loader that uses the argparse module to load from the command line."""
529 """A loader that uses the argparse module to load from the command line."""
530
530
531 def __init__(self, argv=None, aliases=None, flags=None, *parser_args, **parser_kw):
531 def __init__(self, argv=None, aliases=None, flags=None, *parser_args, **parser_kw):
532 """Create a config loader for use with argparse.
532 """Create a config loader for use with argparse.
533
533
534 Parameters
534 Parameters
535 ----------
535 ----------
536
536
537 argv : optional, list
537 argv : optional, list
538 If given, used to read command-line arguments from, otherwise
538 If given, used to read command-line arguments from, otherwise
539 sys.argv[1:] is used.
539 sys.argv[1:] is used.
540
540
541 parser_args : tuple
541 parser_args : tuple
542 A tuple of positional arguments that will be passed to the
542 A tuple of positional arguments that will be passed to the
543 constructor of :class:`argparse.ArgumentParser`.
543 constructor of :class:`argparse.ArgumentParser`.
544
544
545 parser_kw : dict
545 parser_kw : dict
546 A dict of keyword arguments that will be passed to the
546 A dict of keyword arguments that will be passed to the
547 constructor of :class:`argparse.ArgumentParser`.
547 constructor of :class:`argparse.ArgumentParser`.
548
548
549 Returns
549 Returns
550 -------
550 -------
551 config : Config
551 config : Config
552 The resulting Config object.
552 The resulting Config object.
553 """
553 """
554 super(CommandLineConfigLoader, self).__init__()
554 super(CommandLineConfigLoader, self).__init__()
555 self.clear()
555 self.clear()
556 if argv is None:
556 if argv is None:
557 argv = sys.argv[1:]
557 argv = sys.argv[1:]
558 self.argv = argv
558 self.argv = argv
559 self.aliases = aliases or {}
559 self.aliases = aliases or {}
560 self.flags = flags or {}
560 self.flags = flags or {}
561
561
562 self.parser_args = parser_args
562 self.parser_args = parser_args
563 self.version = parser_kw.pop("version", None)
563 self.version = parser_kw.pop("version", None)
564 kwargs = dict(argument_default=argparse.SUPPRESS)
564 kwargs = dict(argument_default=argparse.SUPPRESS)
565 kwargs.update(parser_kw)
565 kwargs.update(parser_kw)
566 self.parser_kw = kwargs
566 self.parser_kw = kwargs
567
567
568 def load_config(self, argv=None, aliases=None, flags=None):
568 def load_config(self, argv=None, aliases=None, flags=None):
569 """Parse command line arguments and return as a Config object.
569 """Parse command line arguments and return as a Config object.
570
570
571 Parameters
571 Parameters
572 ----------
572 ----------
573
573
574 argv : optional, list
574 argv : optional, list
575 If given, a list with the structure of sys.argv[1:] to parse
575 If given, a list with the structure of sys.argv[1:] to parse
576 arguments from. If not given, the instance's self.argv attribute
576 arguments from. If not given, the instance's self.argv attribute
577 (given at construction time) is used."""
577 (given at construction time) is used."""
578 self.clear()
578 self.clear()
579 if argv is None:
579 if argv is None:
580 argv = self.argv
580 argv = self.argv
581 if aliases is None:
581 if aliases is None:
582 aliases = self.aliases
582 aliases = self.aliases
583 if flags is None:
583 if flags is None:
584 flags = self.flags
584 flags = self.flags
585 self._create_parser(aliases, flags)
585 self._create_parser(aliases, flags)
586 self._parse_args(argv)
586 self._parse_args(argv)
587 self._convert_to_config()
587 self._convert_to_config()
588 return self.config
588 return self.config
589
589
590 def get_extra_args(self):
590 def get_extra_args(self):
591 if hasattr(self, 'extra_args'):
591 if hasattr(self, 'extra_args'):
592 return self.extra_args
592 return self.extra_args
593 else:
593 else:
594 return []
594 return []
595
595
596 def _create_parser(self, aliases=None, flags=None):
596 def _create_parser(self, aliases=None, flags=None):
597 self.parser = ArgumentParser(*self.parser_args, **self.parser_kw)
597 self.parser = ArgumentParser(*self.parser_args, **self.parser_kw)
598 self._add_arguments(aliases, flags)
598 self._add_arguments(aliases, flags)
599
599
600 def _add_arguments(self, aliases=None, flags=None):
600 def _add_arguments(self, aliases=None, flags=None):
601 raise NotImplementedError("subclasses must implement _add_arguments")
601 raise NotImplementedError("subclasses must implement _add_arguments")
602
602
603 def _parse_args(self, args):
603 def _parse_args(self, args):
604 """self.parser->self.parsed_data"""
604 """self.parser->self.parsed_data"""
605 # decode sys.argv to support unicode command-line options
605 # decode sys.argv to support unicode command-line options
606 enc = text.getdefaultencoding()
606 enc = py3compat.getdefaultencoding()
607 uargs = [py3compat.cast_unicode(a, enc) for a in args]
607 uargs = [py3compat.cast_unicode(a, enc) for a in args]
608 self.parsed_data, self.extra_args = self.parser.parse_known_args(uargs)
608 self.parsed_data, self.extra_args = self.parser.parse_known_args(uargs)
609
609
610 def _convert_to_config(self):
610 def _convert_to_config(self):
611 """self.parsed_data->self.config"""
611 """self.parsed_data->self.config"""
612 for k, v in vars(self.parsed_data).iteritems():
612 for k, v in vars(self.parsed_data).iteritems():
613 exec "self.config.%s = v"%k in locals(), globals()
613 exec "self.config.%s = v"%k in locals(), globals()
614
614
615 class KVArgParseConfigLoader(ArgParseConfigLoader):
615 class KVArgParseConfigLoader(ArgParseConfigLoader):
616 """A config loader that loads aliases and flags with argparse,
616 """A config loader that loads aliases and flags with argparse,
617 but will use KVLoader for the rest. This allows better parsing
617 but will use KVLoader for the rest. This allows better parsing
618 of common args, such as `ipython -c 'print 5'`, but still gets
618 of common args, such as `ipython -c 'print 5'`, but still gets
619 arbitrary config with `ipython --InteractiveShell.use_readline=False`"""
619 arbitrary config with `ipython --InteractiveShell.use_readline=False`"""
620
620
621 def _convert_to_config(self):
621 def _convert_to_config(self):
622 """self.parsed_data->self.config"""
622 """self.parsed_data->self.config"""
623 for k, v in vars(self.parsed_data).iteritems():
623 for k, v in vars(self.parsed_data).iteritems():
624 self._exec_config_str(k, v)
624 self._exec_config_str(k, v)
625
625
626 def _add_arguments(self, aliases=None, flags=None):
626 def _add_arguments(self, aliases=None, flags=None):
627 self.alias_flags = {}
627 self.alias_flags = {}
628 # print aliases, flags
628 # print aliases, flags
629 if aliases is None:
629 if aliases is None:
630 aliases = self.aliases
630 aliases = self.aliases
631 if flags is None:
631 if flags is None:
632 flags = self.flags
632 flags = self.flags
633 paa = self.parser.add_argument
633 paa = self.parser.add_argument
634 for key,value in aliases.iteritems():
634 for key,value in aliases.iteritems():
635 if key in flags:
635 if key in flags:
636 # flags
636 # flags
637 nargs = '?'
637 nargs = '?'
638 else:
638 else:
639 nargs = None
639 nargs = None
640 if len(key) == 1:
640 if len(key) == 1:
641 paa('-'+key, '--'+key, type=unicode, dest=value, nargs=nargs)
641 paa('-'+key, '--'+key, type=unicode, dest=value, nargs=nargs)
642 else:
642 else:
643 paa('--'+key, type=unicode, dest=value, nargs=nargs)
643 paa('--'+key, type=unicode, dest=value, nargs=nargs)
644 for key, (value, help) in flags.iteritems():
644 for key, (value, help) in flags.iteritems():
645 if key in self.aliases:
645 if key in self.aliases:
646 # flag shares the name of an alias: remember it, apply it later
646 # flag shares the name of an alias: remember it, apply it later
647 self.alias_flags[self.aliases[key]] = value
647 self.alias_flags[self.aliases[key]] = value
648 continue
648 continue
649 if len(key) == 1:
649 if len(key) == 1:
650 paa('-'+key, '--'+key, action='append_const', dest='_flags', const=value)
650 paa('-'+key, '--'+key, action='append_const', dest='_flags', const=value)
651 else:
651 else:
652 paa('--'+key, action='append_const', dest='_flags', const=value)
652 paa('--'+key, action='append_const', dest='_flags', const=value)
653
653
654 def _convert_to_config(self):
654 def _convert_to_config(self):
655 """self.parsed_data->self.config, parse unrecognized extra args via KVLoader."""
655 """self.parsed_data->self.config, parse unrecognized extra args via KVLoader."""
656 # remove subconfigs list from namespace before transforming the Namespace
656 # remove subconfigs list from namespace before transforming the Namespace
657 if '_flags' in self.parsed_data:
657 if '_flags' in self.parsed_data:
658 subcs = self.parsed_data._flags
658 subcs = self.parsed_data._flags
659 del self.parsed_data._flags
659 del self.parsed_data._flags
660 else:
660 else:
661 subcs = []
661 subcs = []
662
662
663 for k, v in vars(self.parsed_data).iteritems():
663 for k, v in vars(self.parsed_data).iteritems():
664 if v is None:
664 if v is None:
665 # it was a flag that shares the name of an alias
665 # it was a flag that shares the name of an alias
666 subcs.append(self.alias_flags[k])
666 subcs.append(self.alias_flags[k])
667 else:
667 else:
668 # eval the KV assignment
668 # eval the KV assignment
669 self._exec_config_str(k, v)
669 self._exec_config_str(k, v)
670
670
671 for subc in subcs:
671 for subc in subcs:
672 self._load_flag(subc)
672 self._load_flag(subc)
673
673
674 if self.extra_args:
674 if self.extra_args:
675 sub_parser = KeyValueConfigLoader()
675 sub_parser = KeyValueConfigLoader()
676 sub_parser.load_config(self.extra_args)
676 sub_parser.load_config(self.extra_args)
677 self.config._merge(sub_parser.config)
677 self.config._merge(sub_parser.config)
678 self.extra_args = sub_parser.extra_args
678 self.extra_args = sub_parser.extra_args
679
679
680
680
681 def load_pyconfig_files(config_files, path):
681 def load_pyconfig_files(config_files, path):
682 """Load multiple Python config files, merging each of them in turn.
682 """Load multiple Python config files, merging each of them in turn.
683
683
684 Parameters
684 Parameters
685 ----------
685 ----------
686 config_files : list of str
686 config_files : list of str
687 List of config files names to load and merge into the config.
687 List of config files names to load and merge into the config.
688 path : unicode
688 path : unicode
689 The full path to the location of the config files.
689 The full path to the location of the config files.
690 """
690 """
691 config = Config()
691 config = Config()
692 for cf in config_files:
692 for cf in config_files:
693 loader = PyFileConfigLoader(cf, path=path)
693 loader = PyFileConfigLoader(cf, path=path)
694 try:
694 try:
695 next_config = loader.load_config()
695 next_config = loader.load_config()
696 except ConfigFileNotFound:
696 except ConfigFileNotFound:
697 pass
697 pass
698 except:
698 except:
699 raise
699 raise
700 else:
700 else:
701 config._merge(next_config)
701 config._merge(next_config)
702 return config
702 return config
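For reference, the command-line loaders in this file are driven as in the short sketch below, which mirrors the KeyValueConfigLoader docstring example above (Python 2 era API):

    # Usage sketch for KeyValueConfigLoader, mirroring the docstring example.
    from IPython.config.loader import KeyValueConfigLoader

    cl = KeyValueConfigLoader()
    config = cl.load_config(["--A.name='brian'", "--B.number=0"])
    # config -> {'A': {'name': 'brian'}, 'B': {'number': 0}}
    # anything that is neither key=value nor a known flag is kept for the caller:
    leftover = cl.extra_args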
@@ -1,59 +1,59 b''
1 """Support for interactive macros in IPython"""
1 """Support for interactive macros in IPython"""
2
2
3 #*****************************************************************************
3 #*****************************************************************************
4 # Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu>
4 # Copyright (C) 2001-2005 Fernando Perez <fperez@colorado.edu>
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING, distributed as part of this software.
7 # the file COPYING, distributed as part of this software.
8 #*****************************************************************************
8 #*****************************************************************************
9
9
10 import re
10 import re
11 import sys
11 import sys
12
12
13 from IPython.utils import py3compat
13 from IPython.utils import py3compat
14
14
15 coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)")
15 coding_declaration = re.compile(r"#\s*coding[:=]\s*([-\w.]+)")
16
16
17 class Macro(object):
17 class Macro(object):
18 """Simple class to store the value of macros as strings.
18 """Simple class to store the value of macros as strings.
19
19
20 Macro is just a callable that executes a string of IPython
20 Macro is just a callable that executes a string of IPython
21 input when called.
21 input when called.
22
22
23 Args to the macro are available in the _margv list if you need them.
23 Args to the macro are available in the _margv list if you need them.
24 """
24 """
25
25
26 def __init__(self,code):
26 def __init__(self,code):
27 """store the macro value, as a single string which can be executed"""
27 """store the macro value, as a single string which can be executed"""
28 lines = []
28 lines = []
29 enc = None
29 enc = None
30 for line in code.splitlines():
30 for line in code.splitlines():
31 coding_match = coding_declaration.match(line)
31 coding_match = coding_declaration.match(line)
32 if coding_match:
32 if coding_match:
33 enc = coding_match.group(1)
33 enc = coding_match.group(1)
34 else:
34 else:
35 lines.append(line)
35 lines.append(line)
36 code = "\n".join(lines)
36 code = "\n".join(lines)
37 if isinstance(code, bytes):
37 if isinstance(code, bytes):
38 code = code.decode(enc or sys.getdefaultencoding())
38 code = code.decode(enc or py3compat.getdefaultencoding())
39 self.value = code + '\n'
39 self.value = code + '\n'
40
40
41 def __str__(self):
41 def __str__(self):
42 return py3compat.unicode_to_str(self.value)
42 return py3compat.unicode_to_str(self.value)
43
43
44 def __unicode__(self):
44 def __unicode__(self):
45 return self.value
45 return self.value
46
46
47 def __repr__(self):
47 def __repr__(self):
48 return 'IPython.macro.Macro(%s)' % repr(self.value)
48 return 'IPython.macro.Macro(%s)' % repr(self.value)
49
49
50 def __getstate__(self):
50 def __getstate__(self):
51 """ needed for safe pickling via %store """
51 """ needed for safe pickling via %store """
52 return {'value': self.value}
52 return {'value': self.value}
53
53
54 def __add__(self, other):
54 def __add__(self, other):
55 if isinstance(other, Macro):
55 if isinstance(other, Macro):
56 return Macro(self.value + other.value)
56 return Macro(self.value + other.value)
57 elif isinstance(other, basestring):
57 elif isinstance(other, basestring):
58 return Macro(self.value + other)
58 return Macro(self.value + other)
59 raise TypeError
59 raise TypeError
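The Macro change above only swaps the fallback encoding used when the macro source arrives as bytes without a coding declaration. A hedged sketch of how that path is exercised (the import path is assumed; adjust to wherever Macro lives in your tree):

    # Bytes with a coding declaration are decoded with it; bytes without one
    # fall back to py3compat.getdefaultencoding() after this commit.
    from IPython.core.macro import Macro   # assumed module path

    src = b"# coding: utf-8\nprint 'caf\xc3\xa9'\n"
    m = Macro(src)
    print(repr(m.value))                   # coding line stripped, text decoded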
@@ -1,75 +1,75 b''
1 # coding: utf-8
1 # coding: utf-8
2 """Tests for the compilerop module.
2 """Tests for the compilerop module.
3 """
3 """
4 #-----------------------------------------------------------------------------
4 #-----------------------------------------------------------------------------
5 # Copyright (C) 2010-2011 The IPython Development Team.
5 # Copyright (C) 2010-2011 The IPython Development Team.
6 #
6 #
7 # Distributed under the terms of the BSD License.
7 # Distributed under the terms of the BSD License.
8 #
8 #
9 # The full license is in the file COPYING.txt, distributed with this software.
9 # The full license is in the file COPYING.txt, distributed with this software.
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11
11
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13 # Imports
13 # Imports
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 from __future__ import print_function
15 from __future__ import print_function
16
16
17 # Stdlib imports
17 # Stdlib imports
18 import linecache
18 import linecache
19 import sys
19 import sys
20
20
21 # Third-party imports
21 # Third-party imports
22 import nose.tools as nt
22 import nose.tools as nt
23
23
24 # Our own imports
24 # Our own imports
25 from IPython.core import compilerop
25 from IPython.core import compilerop
26 from IPython.utils import py3compat
26 from IPython.utils import py3compat
27
27
28 #-----------------------------------------------------------------------------
28 #-----------------------------------------------------------------------------
29 # Test functions
29 # Test functions
30 #-----------------------------------------------------------------------------
30 #-----------------------------------------------------------------------------
31
31
32 def test_code_name():
32 def test_code_name():
33 code = 'x=1'
33 code = 'x=1'
34 name = compilerop.code_name(code)
34 name = compilerop.code_name(code)
35 nt.assert_true(name.startswith('<ipython-input-0'))
35 nt.assert_true(name.startswith('<ipython-input-0'))
36
36
37
37
38 def test_code_name2():
38 def test_code_name2():
39 code = 'x=1'
39 code = 'x=1'
40 name = compilerop.code_name(code, 9)
40 name = compilerop.code_name(code, 9)
41 nt.assert_true(name.startswith('<ipython-input-9'))
41 nt.assert_true(name.startswith('<ipython-input-9'))
42
42
43
43
44 def test_cache():
44 def test_cache():
45 """Test the compiler correctly compiles and caches inputs
45 """Test the compiler correctly compiles and caches inputs
46 """
46 """
47 cp = compilerop.CachingCompiler()
47 cp = compilerop.CachingCompiler()
48 ncache = len(linecache.cache)
48 ncache = len(linecache.cache)
49 cp.cache('x=1')
49 cp.cache('x=1')
50 nt.assert_true(len(linecache.cache) > ncache)
50 nt.assert_true(len(linecache.cache) > ncache)
51
51
52 def setUp():
52 def setUp():
53 # Check we're in a proper Python 2 environment (some imports, such
53 # Check we're in a proper Python 2 environment (some imports, such
54 # as GTK, can change the default encoding, which can hide bugs.)
54 # as GTK, can change the default encoding, which can hide bugs.)
55 nt.assert_equal(sys.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
55 nt.assert_equal(py3compat.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
56
56
57 def test_cache_unicode():
57 def test_cache_unicode():
58 cp = compilerop.CachingCompiler()
58 cp = compilerop.CachingCompiler()
59 ncache = len(linecache.cache)
59 ncache = len(linecache.cache)
60 cp.cache(u"t = 'žćčőđ'")
60 cp.cache(u"t = 'žćčőđ'")
61 nt.assert_true(len(linecache.cache) > ncache)
61 nt.assert_true(len(linecache.cache) > ncache)
62
62
63 def test_compiler_check_cache():
63 def test_compiler_check_cache():
64 """Test the compiler properly manages the cache.
64 """Test the compiler properly manages the cache.
65 """
65 """
66 # Rather simple-minded tests that just exercise the API
66 # Rather simple-minded tests that just exercise the API
67 cp = compilerop.CachingCompiler()
67 cp = compilerop.CachingCompiler()
68 cp.cache('x=1', 99)
68 cp.cache('x=1', 99)
69 # Ensure now that after clearing the cache, our entries survive
69 # Ensure now that after clearing the cache, our entries survive
70 cp.check_cache()
70 cp.check_cache()
71 for k in linecache.cache:
71 for k in linecache.cache:
72 if k.startswith('<ipython-input-99'):
72 if k.startswith('<ipython-input-99'):
73 break
73 break
74 else:
74 else:
75 raise AssertionError('Entry for input-99 missing from linecache')
75 raise AssertionError('Entry for input-99 missing from linecache')
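The setUp() guards in these test modules exist because some imports (GTK is the example cited above) can change the process default encoding and hide unicode bugs; after this commit the guard asserts against py3compat rather than sys. Expressed on its own (hedged sketch, same assumptions about py3compat as in the diff):

    # Standalone form of the encoding sanity check used by these tests.
    from IPython.utils import py3compat

    def check_default_encoding():
        expected = "utf-8" if py3compat.PY3 else "ascii"
        actual = py3compat.getdefaultencoding()
        assert actual == expected, "unexpected default encoding: %r" % actual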
@@ -1,151 +1,151 b''
1 # coding: utf-8
1 # coding: utf-8
2 """Tests for the IPython tab-completion machinery.
2 """Tests for the IPython tab-completion machinery.
3 """
3 """
4 #-----------------------------------------------------------------------------
4 #-----------------------------------------------------------------------------
5 # Module imports
5 # Module imports
6 #-----------------------------------------------------------------------------
6 #-----------------------------------------------------------------------------
7
7
8 # stdlib
8 # stdlib
9 import os
9 import os
10 import shutil
10 import shutil
11 import sys
11 import sys
12 import tempfile
12 import tempfile
13 import unittest
13 import unittest
14 from datetime import datetime
14 from datetime import datetime
15
15
16 # third party
16 # third party
17 import nose.tools as nt
17 import nose.tools as nt
18
18
19 # our own packages
19 # our own packages
20 from IPython.config.loader import Config
20 from IPython.config.loader import Config
21 from IPython.utils.tempdir import TemporaryDirectory
21 from IPython.utils.tempdir import TemporaryDirectory
22 from IPython.core.history import HistoryManager, extract_hist_ranges
22 from IPython.core.history import HistoryManager, extract_hist_ranges
23 from IPython.utils import py3compat
23 from IPython.utils import py3compat
24
24
25 def setUp():
25 def setUp():
26 nt.assert_equal(sys.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
26 nt.assert_equal(py3compat.getdefaultencoding(), "utf-8" if py3compat.PY3 else "ascii")
27
27
28 def test_history():
28 def test_history():
29 ip = get_ipython()
29 ip = get_ipython()
30 with TemporaryDirectory() as tmpdir:
30 with TemporaryDirectory() as tmpdir:
31 hist_manager_ori = ip.history_manager
31 hist_manager_ori = ip.history_manager
32 hist_file = os.path.join(tmpdir, 'history.sqlite')
32 hist_file = os.path.join(tmpdir, 'history.sqlite')
33 try:
33 try:
34 ip.history_manager = HistoryManager(shell=ip, hist_file=hist_file)
34 ip.history_manager = HistoryManager(shell=ip, hist_file=hist_file)
35 hist = [u'a=1', u'def f():\n test = 1\n return test', u"b='β‚¬Γ†ΒΎΓ·ΓŸ'"]
35 hist = [u'a=1', u'def f():\n test = 1\n return test', u"b='β‚¬Γ†ΒΎΓ·ΓŸ'"]
36 for i, h in enumerate(hist, start=1):
36 for i, h in enumerate(hist, start=1):
37 ip.history_manager.store_inputs(i, h)
37 ip.history_manager.store_inputs(i, h)
38
38
39 ip.history_manager.db_log_output = True
39 ip.history_manager.db_log_output = True
40 # Doesn't match the input, but we'll just check it's stored.
40 # Doesn't match the input, but we'll just check it's stored.
41 ip.history_manager.output_hist_reprs[3] = "spam"
41 ip.history_manager.output_hist_reprs[3] = "spam"
42 ip.history_manager.store_output(3)
42 ip.history_manager.store_output(3)
43
43
44 nt.assert_equal(ip.history_manager.input_hist_raw, [''] + hist)
44 nt.assert_equal(ip.history_manager.input_hist_raw, [''] + hist)
45
45
46 # Detailed tests for _get_range_session
46 # Detailed tests for _get_range_session
47 grs = ip.history_manager._get_range_session
47 grs = ip.history_manager._get_range_session
48 nt.assert_equal(list(grs(start=2,stop=-1)), zip([0], [2], hist[1:-1]))
48 nt.assert_equal(list(grs(start=2,stop=-1)), zip([0], [2], hist[1:-1]))
49 nt.assert_equal(list(grs(start=-2)), zip([0,0], [2,3], hist[-2:]))
49 nt.assert_equal(list(grs(start=-2)), zip([0,0], [2,3], hist[-2:]))
50 nt.assert_equal(list(grs(output=True)), zip([0,0,0], [1,2,3], zip(hist, [None,None,'spam'])))
50 nt.assert_equal(list(grs(output=True)), zip([0,0,0], [1,2,3], zip(hist, [None,None,'spam'])))
51
51
52 # Check whether specifying a range beyond the end of the current
52 # Check whether specifying a range beyond the end of the current
53 # session results in an error (gh-804)
53 # session results in an error (gh-804)
54 ip.magic('%hist 2-500')
54 ip.magic('%hist 2-500')
55
55
56 # Check that we can write non-ascii characters to a file
56 # Check that we can write non-ascii characters to a file
57 ip.magic("%%hist -f %s" % os.path.join(tmpdir, "test1"))
57 ip.magic("%%hist -f %s" % os.path.join(tmpdir, "test1"))
58 ip.magic("%%hist -pf %s" % os.path.join(tmpdir, "test2"))
58 ip.magic("%%hist -pf %s" % os.path.join(tmpdir, "test2"))
59 ip.magic("%%hist -nf %s" % os.path.join(tmpdir, "test3"))
59 ip.magic("%%hist -nf %s" % os.path.join(tmpdir, "test3"))
60 ip.magic("%%save %s 1-10" % os.path.join(tmpdir, "test4"))
60 ip.magic("%%save %s 1-10" % os.path.join(tmpdir, "test4"))
61
61
62 # New session
62 # New session
63 ip.history_manager.reset()
63 ip.history_manager.reset()
64 newcmds = ["z=5","class X(object):\n pass", "k='p'"]
64 newcmds = ["z=5","class X(object):\n pass", "k='p'"]
65 for i, cmd in enumerate(newcmds, start=1):
65 for i, cmd in enumerate(newcmds, start=1):
66 ip.history_manager.store_inputs(i, cmd)
66 ip.history_manager.store_inputs(i, cmd)
67 gothist = ip.history_manager.get_range(start=1, stop=4)
67 gothist = ip.history_manager.get_range(start=1, stop=4)
68 nt.assert_equal(list(gothist), zip([0,0,0],[1,2,3], newcmds))
68 nt.assert_equal(list(gothist), zip([0,0,0],[1,2,3], newcmds))
69 # Previous session:
69 # Previous session:
70 gothist = ip.history_manager.get_range(-1, 1, 4)
70 gothist = ip.history_manager.get_range(-1, 1, 4)
71 nt.assert_equal(list(gothist), zip([1,1,1],[1,2,3], hist))
71 nt.assert_equal(list(gothist), zip([1,1,1],[1,2,3], hist))
72
72
73 # Check get_hist_tail
73 # Check get_hist_tail
74 gothist = ip.history_manager.get_tail(4, output=True,
74 gothist = ip.history_manager.get_tail(4, output=True,
75 include_latest=True)
75 include_latest=True)
76 expected = [(1, 3, (hist[-1], "spam")),
76 expected = [(1, 3, (hist[-1], "spam")),
77 (2, 1, (newcmds[0], None)),
77 (2, 1, (newcmds[0], None)),
78 (2, 2, (newcmds[1], None)),
78 (2, 2, (newcmds[1], None)),
79 (2, 3, (newcmds[2], None)),]
79 (2, 3, (newcmds[2], None)),]
80 nt.assert_equal(list(gothist), expected)
80 nt.assert_equal(list(gothist), expected)
81
81
82 gothist = ip.history_manager.get_tail(2)
82 gothist = ip.history_manager.get_tail(2)
83 expected = [(2, 1, newcmds[0]),
83 expected = [(2, 1, newcmds[0]),
84 (2, 2, newcmds[1])]
84 (2, 2, newcmds[1])]
85 nt.assert_equal(list(gothist), expected)
85 nt.assert_equal(list(gothist), expected)
86
86
87 # Check get_hist_search
87 # Check get_hist_search
88 gothist = ip.history_manager.search("*test*")
88 gothist = ip.history_manager.search("*test*")
89 nt.assert_equal(list(gothist), [(1,2,hist[1])] )
89 nt.assert_equal(list(gothist), [(1,2,hist[1])] )
90 gothist = ip.history_manager.search("b*", output=True)
90 gothist = ip.history_manager.search("b*", output=True)
91 nt.assert_equal(list(gothist), [(1,3,(hist[2],"spam"))] )
91 nt.assert_equal(list(gothist), [(1,3,(hist[2],"spam"))] )
92
92
93 # Cross testing: check that magic %save can get previous session.
93 # Cross testing: check that magic %save can get previous session.
94 testfilename = os.path.realpath(os.path.join(tmpdir, "test.py"))
94 testfilename = os.path.realpath(os.path.join(tmpdir, "test.py"))
95 ip.magic_save(testfilename + " ~1/1-3")
95 ip.magic_save(testfilename + " ~1/1-3")
96 with py3compat.open(testfilename) as testfile:
96 with py3compat.open(testfilename) as testfile:
97 nt.assert_equal(testfile.read(),
97 nt.assert_equal(testfile.read(),
98 u"# coding: utf-8\n" + u"\n".join(hist))
98 u"# coding: utf-8\n" + u"\n".join(hist))
99
99
100 # Duplicate line numbers - check that it doesn't crash, and
100 # Duplicate line numbers - check that it doesn't crash, and
101 # gets a new session
101 # gets a new session
102 ip.history_manager.store_inputs(1, "rogue")
102 ip.history_manager.store_inputs(1, "rogue")
103 ip.history_manager.writeout_cache()
103 ip.history_manager.writeout_cache()
104 nt.assert_equal(ip.history_manager.session_number, 3)
104 nt.assert_equal(ip.history_manager.session_number, 3)
105 finally:
105 finally:
106 # Restore history manager
106 # Restore history manager
107 ip.history_manager = hist_manager_ori
107 ip.history_manager = hist_manager_ori
108
108
109
109
110 def test_extract_hist_ranges():
110 def test_extract_hist_ranges():
111 instr = "1 2/3 ~4/5-6 ~4/7-~4/9 ~9/2-~7/5"
111 instr = "1 2/3 ~4/5-6 ~4/7-~4/9 ~9/2-~7/5"
112 expected = [(0, 1, 2), # 0 == current session
112 expected = [(0, 1, 2), # 0 == current session
113 (2, 3, 4),
113 (2, 3, 4),
114 (-4, 5, 7),
114 (-4, 5, 7),
115 (-4, 7, 10),
115 (-4, 7, 10),
116 (-9, 2, None), # None == to end
116 (-9, 2, None), # None == to end
117 (-8, 1, None),
117 (-8, 1, None),
118 (-7, 1, 6)]
118 (-7, 1, 6)]
119 actual = list(extract_hist_ranges(instr))
119 actual = list(extract_hist_ranges(instr))
120 nt.assert_equal(actual, expected)
120 nt.assert_equal(actual, expected)
121
121
122 def test_magic_rerun():
122 def test_magic_rerun():
123 """Simple test for %rerun (no args -> rerun last line)"""
123 """Simple test for %rerun (no args -> rerun last line)"""
124 ip = get_ipython()
124 ip = get_ipython()
125 ip.run_cell("a = 10", store_history=True)
125 ip.run_cell("a = 10", store_history=True)
126 ip.run_cell("a += 1", store_history=True)
126 ip.run_cell("a += 1", store_history=True)
127 nt.assert_equal(ip.user_ns["a"], 11)
127 nt.assert_equal(ip.user_ns["a"], 11)
128 ip.run_cell("%rerun", store_history=True)
128 ip.run_cell("%rerun", store_history=True)
129 nt.assert_equal(ip.user_ns["a"], 12)
129 nt.assert_equal(ip.user_ns["a"], 12)
130
130
131 def test_timestamp_type():
131 def test_timestamp_type():
132 ip = get_ipython()
132 ip = get_ipython()
133 info = ip.history_manager.get_session_info()
133 info = ip.history_manager.get_session_info()
134 nt.assert_true(isinstance(info[1], datetime))
134 nt.assert_true(isinstance(info[1], datetime))
135
135
136 def test_hist_file_config():
136 def test_hist_file_config():
137 cfg = Config()
137 cfg = Config()
138 tfile = tempfile.NamedTemporaryFile(delete=False)
138 tfile = tempfile.NamedTemporaryFile(delete=False)
139 cfg.HistoryManager.hist_file = tfile.name
139 cfg.HistoryManager.hist_file = tfile.name
140 try:
140 try:
141 hm = HistoryManager(shell=get_ipython(), config=cfg)
141 hm = HistoryManager(shell=get_ipython(), config=cfg)
142 nt.assert_equals(hm.hist_file, cfg.HistoryManager.hist_file)
142 nt.assert_equals(hm.hist_file, cfg.HistoryManager.hist_file)
143 finally:
143 finally:
144 try:
144 try:
145 os.remove(tfile.name)
145 os.remove(tfile.name)
146 except OSError:
146 except OSError:
147 # same catch as in testing.tools.TempFileMixin
147 # same catch as in testing.tools.TempFileMixin
148 # On Windows, even though we close the file, we still can't
148 # On Windows, even though we close the file, we still can't
149 # delete it. I have no clue why
149 # delete it. I have no clue why
150 pass
150 pass
151
151
@@ -1,391 +1,391 b''
1 """Generic testing tools.
1 """Generic testing tools.
2
2
3 In particular, this module exposes a set of top-level assert* functions that
3 In particular, this module exposes a set of top-level assert* functions that
4 can be used in place of nose.tools.assert* in method generators (the ones in
4 can be used in place of nose.tools.assert* in method generators (the ones in
5 nose can not, at least as of nose 0.10.4).
5 nose can not, at least as of nose 0.10.4).
6
6
7
7
8 Authors
8 Authors
9 -------
9 -------
10 - Fernando Perez <Fernando.Perez@berkeley.edu>
10 - Fernando Perez <Fernando.Perez@berkeley.edu>
11 """
11 """
12
12
13 from __future__ import absolute_import
13 from __future__ import absolute_import
14
14
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 # Copyright (C) 2009-2011 The IPython Development Team
16 # Copyright (C) 2009-2011 The IPython Development Team
17 #
17 #
18 # Distributed under the terms of the BSD License. The full license is in
18 # Distributed under the terms of the BSD License. The full license is in
19 # the file COPYING, distributed as part of this software.
19 # the file COPYING, distributed as part of this software.
20 #-----------------------------------------------------------------------------
20 #-----------------------------------------------------------------------------
21
21
22 #-----------------------------------------------------------------------------
22 #-----------------------------------------------------------------------------
23 # Imports
23 # Imports
24 #-----------------------------------------------------------------------------
24 #-----------------------------------------------------------------------------
25
25
26 import os
26 import os
27 import re
27 import re
28 import sys
28 import sys
29 import tempfile
29 import tempfile
30
30
31 from contextlib import contextmanager
31 from contextlib import contextmanager
32 from io import StringIO
32 from io import StringIO
33
33
34 try:
34 try:
35 # These tools are used by parts of the runtime, so we make the nose
35 # These tools are used by parts of the runtime, so we make the nose
36 # dependency optional at this point. Nose is a hard dependency to run the
36 # dependency optional at this point. Nose is a hard dependency to run the
37 # test suite, but NOT to use ipython itself.
37 # test suite, but NOT to use ipython itself.
38 import nose.tools as nt
38 import nose.tools as nt
39 has_nose = True
39 has_nose = True
40 except ImportError:
40 except ImportError:
41 has_nose = False
41 has_nose = False
42
42
43 from IPython.config.loader import Config
43 from IPython.config.loader import Config
44 from IPython.utils.process import find_cmd, getoutputerror
44 from IPython.utils.process import find_cmd, getoutputerror
45 from IPython.utils.text import list_strings, getdefaultencoding
45 from IPython.utils.text import list_strings
46 from IPython.utils.io import temp_pyfile, Tee
46 from IPython.utils.io import temp_pyfile, Tee
47 from IPython.utils import py3compat
47 from IPython.utils import py3compat
48
48
49 from . import decorators as dec
49 from . import decorators as dec
50 from . import skipdoctest
50 from . import skipdoctest
51
51
52 #-----------------------------------------------------------------------------
52 #-----------------------------------------------------------------------------
53 # Globals
53 # Globals
54 #-----------------------------------------------------------------------------
54 #-----------------------------------------------------------------------------
55
55
56 # Make a bunch of nose.tools assert wrappers that can be used in test
56 # Make a bunch of nose.tools assert wrappers that can be used in test
57 # generators. This will expose an assert* function for each one in nose.tools.
57 # generators. This will expose an assert* function for each one in nose.tools.
58
58
59 _tpl = """
59 _tpl = """
60 def %(name)s(*a,**kw):
60 def %(name)s(*a,**kw):
61 return nt.%(name)s(*a,**kw)
61 return nt.%(name)s(*a,**kw)
62 """
62 """
63
63
64 if has_nose:
64 if has_nose:
65 for _x in [a for a in dir(nt) if a.startswith('assert')]:
65 for _x in [a for a in dir(nt) if a.startswith('assert')]:
66 exec _tpl % dict(name=_x)
66 exec _tpl % dict(name=_x)
67
67
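To illustrate why these module-level wrappers exist (a hedged sketch; the import path IPython.testing.tools is assumed): nose test generators yield callables plus arguments, and plain module-level functions work there while the nose.tools attributes historically did not.

    from IPython.testing.tools import assert_equal  # generated wrapper (assumed path)

    def test_squares():
        # A nose test generator: nose calls assert_equal(x * x, expected)
        # once per yielded tuple, reporting each as a separate test.
        for x, expected in [(2, 4), (3, 9), (4, 16)]:
            yield assert_equal, x * x, expected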
68 #-----------------------------------------------------------------------------
68 #-----------------------------------------------------------------------------
69 # Functions and classes
69 # Functions and classes
70 #-----------------------------------------------------------------------------
70 #-----------------------------------------------------------------------------
71
71
72 # The docstring for full_path doctests differently on win32 (different path
72 # The docstring for full_path doctests differently on win32 (different path
73 # separator) so just skip the doctest there. The example remains informative.
73 # separator) so just skip the doctest there. The example remains informative.
74 doctest_deco = skipdoctest.skip_doctest if sys.platform == 'win32' else dec.null_deco
74 doctest_deco = skipdoctest.skip_doctest if sys.platform == 'win32' else dec.null_deco
75
75
76 @doctest_deco
76 @doctest_deco
77 def full_path(startPath,files):
77 def full_path(startPath,files):
78 """Make full paths for all the listed files, based on startPath.
78 """Make full paths for all the listed files, based on startPath.
79
79
80 Only the base part of startPath is kept, since this routine is typically
80 Only the base part of startPath is kept, since this routine is typically
81 used with a script's __file__ variable as startPath. The base of startPath
81 used with a script's __file__ variable as startPath. The base of startPath
82 is then prepended to all the listed files, forming the output list.
82 is then prepended to all the listed files, forming the output list.
83
83
84 Parameters
84 Parameters
85 ----------
85 ----------
86 startPath : string
86 startPath : string
87 Initial path to use as the base for the results. This path is split
87 Initial path to use as the base for the results. This path is split
88 using os.path.split() and only its first component is kept.
88 using os.path.split() and only its first component is kept.
89
89
90 files : string or list
90 files : string or list
91 One or more files.
91 One or more files.
92
92
93 Examples
93 Examples
94 --------
94 --------
95
95
96 >>> full_path('/foo/bar.py',['a.txt','b.txt'])
96 >>> full_path('/foo/bar.py',['a.txt','b.txt'])
97 ['/foo/a.txt', '/foo/b.txt']
97 ['/foo/a.txt', '/foo/b.txt']
98
98
99 >>> full_path('/foo',['a.txt','b.txt'])
99 >>> full_path('/foo',['a.txt','b.txt'])
100 ['/a.txt', '/b.txt']
100 ['/a.txt', '/b.txt']
101
101
102 If a single file is given, the output is still a list:
102 If a single file is given, the output is still a list:
103 >>> full_path('/foo','a.txt')
103 >>> full_path('/foo','a.txt')
104 ['/a.txt']
104 ['/a.txt']
105 """
105 """
106
106
107 files = list_strings(files)
107 files = list_strings(files)
108 base = os.path.split(startPath)[0]
108 base = os.path.split(startPath)[0]
109 return [ os.path.join(base,f) for f in files ]
109 return [ os.path.join(base,f) for f in files ]
110
110
111
111
112 def parse_test_output(txt):
112 def parse_test_output(txt):
113 """Parse the output of a test run and return errors, failures.
113 """Parse the output of a test run and return errors, failures.
114
114
115 Parameters
115 Parameters
116 ----------
116 ----------
117 txt : str
117 txt : str
118 Text output of a test run, assumed to contain a line of one of the
118 Text output of a test run, assumed to contain a line of one of the
119 following forms::
119 following forms::
120 'FAILED (errors=1)'
120 'FAILED (errors=1)'
121 'FAILED (failures=1)'
121 'FAILED (failures=1)'
122 'FAILED (errors=1, failures=1)'
122 'FAILED (errors=1, failures=1)'
123
123
124 Returns
124 Returns
125 -------
125 -------
126 nerr, nfail: number of errors and failures.
126 nerr, nfail: number of errors and failures.
127 """
127 """
128
128
129 err_m = re.search(r'^FAILED \(errors=(\d+)\)', txt, re.MULTILINE)
129 err_m = re.search(r'^FAILED \(errors=(\d+)\)', txt, re.MULTILINE)
130 if err_m:
130 if err_m:
131 nerr = int(err_m.group(1))
131 nerr = int(err_m.group(1))
132 nfail = 0
132 nfail = 0
133 return nerr, nfail
133 return nerr, nfail
134
134
135 fail_m = re.search(r'^FAILED \(failures=(\d+)\)', txt, re.MULTILINE)
135 fail_m = re.search(r'^FAILED \(failures=(\d+)\)', txt, re.MULTILINE)
136 if fail_m:
136 if fail_m:
137 nerr = 0
137 nerr = 0
138 nfail = int(fail_m.group(1))
138 nfail = int(fail_m.group(1))
139 return nerr, nfail
139 return nerr, nfail
140
140
141 both_m = re.search(r'^FAILED \(errors=(\d+), failures=(\d+)\)', txt,
141 both_m = re.search(r'^FAILED \(errors=(\d+), failures=(\d+)\)', txt,
142 re.MULTILINE)
142 re.MULTILINE)
143 if both_m:
143 if both_m:
144 nerr = int(both_m.group(1))
144 nerr = int(both_m.group(1))
145 nfail = int(both_m.group(2))
145 nfail = int(both_m.group(2))
146 return nerr, nfail
146 return nerr, nfail
147
147
148 # If the input didn't match any of these forms, assume no error/failures
148 # If the input didn't match any of these forms, assume no error/failures
149 return 0, 0
149 return 0, 0
150
150
151
151
152 # So nose doesn't think this is a test
152 # So nose doesn't think this is a test
153 parse_test_output.__test__ = False
153 parse_test_output.__test__ = False
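A small, hedged usage sketch of parse_test_output on the line formats named in its docstring (the import path is assumed):

    from IPython.testing.tools import parse_test_output  # assumed path

    summary = "Ran 10 tests in 0.5s\n\nFAILED (errors=1, failures=2)\n"
    nerr, nfail = parse_test_output(summary)
    assert (nerr, nfail) == (1, 2)

    # Output without a FAILED line is treated as a clean run.
    assert parse_test_output("Ran 3 tests in 0.1s\n\nOK\n") == (0, 0)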
154
154
155
155
156 def default_argv():
156 def default_argv():
157 """Return a valid default argv for creating testing instances of ipython"""
157 """Return a valid default argv for creating testing instances of ipython"""
158
158
159 return ['--quick', # so no config file is loaded
159 return ['--quick', # so no config file is loaded
160 # Other defaults to minimize side effects on stdout
160 # Other defaults to minimize side effects on stdout
161 '--colors=NoColor', '--no-term-title','--no-banner',
161 '--colors=NoColor', '--no-term-title','--no-banner',
162 '--autocall=0']
162 '--autocall=0']
163
163
164
164
165 def default_config():
165 def default_config():
166 """Return a config object with good defaults for testing."""
166 """Return a config object with good defaults for testing."""
167 config = Config()
167 config = Config()
168 config.TerminalInteractiveShell.colors = 'NoColor'
168 config.TerminalInteractiveShell.colors = 'NoColor'
169 config.TerminalInteractiveShell.term_title = False
169 config.TerminalInteractiveShell.term_title = False
170 config.TerminalInteractiveShell.autocall = 0
170 config.TerminalInteractiveShell.autocall = 0
171 config.HistoryManager.hist_file = tempfile.mktemp(u'test_hist.sqlite')
171 config.HistoryManager.hist_file = tempfile.mktemp(u'test_hist.sqlite')
172 config.HistoryManager.db_cache_size = 10000
172 config.HistoryManager.db_cache_size = 10000
173 return config
173 return config
174
174
175
175
176 def ipexec(fname, options=None):
176 def ipexec(fname, options=None):
177 """Utility to call 'ipython filename'.
177 """Utility to call 'ipython filename'.
178
178
179 Starts IPython with a minimal and safe configuration to make startup as fast
179 Starts IPython with a minimal and safe configuration to make startup as fast
180 as possible.
180 as possible.
181
181
182 Note that this starts IPython in a subprocess!
182 Note that this starts IPython in a subprocess!
183
183
184 Parameters
184 Parameters
185 ----------
185 ----------
186 fname : str
186 fname : str
187 Name of file to be executed (should have .py or .ipy extension).
187 Name of file to be executed (should have .py or .ipy extension).
188
188
189 options : optional, list
189 options : optional, list
190 Extra command-line flags to be passed to IPython.
190 Extra command-line flags to be passed to IPython.
191
191
192 Returns
192 Returns
193 -------
193 -------
194 (stdout, stderr) of ipython subprocess.
194 (stdout, stderr) of ipython subprocess.
195 """
195 """
196 if options is None: options = []
196 if options is None: options = []
197
197
198 # For these subprocess calls, eliminate all prompt printing so we only see
198 # For these subprocess calls, eliminate all prompt printing so we only see
199 # output from script execution
199 # output from script execution
200 prompt_opts = [ '--PromptManager.in_template=""',
200 prompt_opts = [ '--PromptManager.in_template=""',
201 '--PromptManager.in2_template=""',
201 '--PromptManager.in2_template=""',
202 '--PromptManager.out_template=""'
202 '--PromptManager.out_template=""'
203 ]
203 ]
204 cmdargs = ' '.join(default_argv() + prompt_opts + options)
204 cmdargs = ' '.join(default_argv() + prompt_opts + options)
205
205
206 _ip = get_ipython()
206 _ip = get_ipython()
207 test_dir = os.path.dirname(__file__)
207 test_dir = os.path.dirname(__file__)
208
208
209 ipython_cmd = find_cmd('ipython3' if py3compat.PY3 else 'ipython')
209 ipython_cmd = find_cmd('ipython3' if py3compat.PY3 else 'ipython')
210 # Absolute path for filename
210 # Absolute path for filename
211 full_fname = os.path.join(test_dir, fname)
211 full_fname = os.path.join(test_dir, fname)
212 full_cmd = '%s %s %s' % (ipython_cmd, cmdargs, full_fname)
212 full_cmd = '%s %s %s' % (ipython_cmd, cmdargs, full_fname)
213 #print >> sys.stderr, 'FULL CMD:', full_cmd # dbg
213 #print >> sys.stderr, 'FULL CMD:', full_cmd # dbg
214 out, err = getoutputerror(full_cmd)
214 out, err = getoutputerror(full_cmd)
215 # `import readline` causes 'ESC[?1034h' to be output sometimes,
215 # `import readline` causes 'ESC[?1034h' to be output sometimes,
216 # so strip that out before doing comparisons
216 # so strip that out before doing comparisons
217 if out:
217 if out:
218 out = re.sub(r'\x1b\[[^h]+h', '', out)
218 out = re.sub(r'\x1b\[[^h]+h', '', out)
219 return out, err
219 return out, err
220
220
221
221
222 def ipexec_validate(fname, expected_out, expected_err='',
222 def ipexec_validate(fname, expected_out, expected_err='',
223 options=None):
223 options=None):
224 """Utility to call 'ipython filename' and validate output/error.
224 """Utility to call 'ipython filename' and validate output/error.
225
225
226 This function raises an AssertionError if the validation fails.
226 This function raises an AssertionError if the validation fails.
227
227
228 Note that this starts IPython in a subprocess!
228 Note that this starts IPython in a subprocess!
229
229
230 Parameters
230 Parameters
231 ----------
231 ----------
232 fname : str
232 fname : str
233 Name of the file to be executed (should have .py or .ipy extension).
233 Name of the file to be executed (should have .py or .ipy extension).
234
234
235 expected_out : str
235 expected_out : str
236 Expected stdout of the process.
236 Expected stdout of the process.
237
237
238 expected_err : optional, str
238 expected_err : optional, str
239 Expected stderr of the process.
239 Expected stderr of the process.
240
240
241 options : optional, list
241 options : optional, list
242 Extra command-line flags to be passed to IPython.
242 Extra command-line flags to be passed to IPython.
243
243
244 Returns
244 Returns
245 -------
245 -------
246 None
246 None
247 """
247 """
248
248
249 import nose.tools as nt
249 import nose.tools as nt
250
250
251 out, err = ipexec(fname, options)
251 out, err = ipexec(fname, options)
252 #print 'OUT', out # dbg
252 #print 'OUT', out # dbg
253 #print 'ERR', err # dbg
253 #print 'ERR', err # dbg
254 # If there are any errors, we must check those before stdout, as they may be
254 # If there are any errors, we must check those before stdout, as they may be
255 # more informative than simply having an empty stdout.
255 # more informative than simply having an empty stdout.
256 if err:
256 if err:
257 if expected_err:
257 if expected_err:
258 nt.assert_equals(err.strip(), expected_err.strip())
258 nt.assert_equals(err.strip(), expected_err.strip())
259 else:
259 else:
260 raise ValueError('Running file %r produced error: %r' %
260 raise ValueError('Running file %r produced error: %r' %
261 (fname, err))
261 (fname, err))
262 # If no errors or output on stderr was expected, match stdout
262 # If no errors or output on stderr was expected, match stdout
263 nt.assert_equals(out.strip(), expected_out.strip())
263 nt.assert_equals(out.strip(), expected_out.strip())
264
264
265
265
266 class TempFileMixin(object):
266 class TempFileMixin(object):
267 """Utility class to create temporary Python/IPython files.
267 """Utility class to create temporary Python/IPython files.
268
268
269 Meant as a mixin class for test cases."""
269 Meant as a mixin class for test cases."""
270
270
271 def mktmp(self, src, ext='.py'):
271 def mktmp(self, src, ext='.py'):
272 """Make a valid python temp file."""
272 """Make a valid python temp file."""
273 fname, f = temp_pyfile(src, ext)
273 fname, f = temp_pyfile(src, ext)
274 self.tmpfile = f
274 self.tmpfile = f
275 self.fname = fname
275 self.fname = fname
276
276
277 def tearDown(self):
277 def tearDown(self):
278 if hasattr(self, 'tmpfile'):
278 if hasattr(self, 'tmpfile'):
279 # If the tmpfile wasn't made because of skipped tests, like in
279 # If the tmpfile wasn't made because of skipped tests, like in
280 # win32, there's nothing to cleanup.
280 # win32, there's nothing to cleanup.
281 self.tmpfile.close()
281 self.tmpfile.close()
282 try:
282 try:
283 os.unlink(self.fname)
283 os.unlink(self.fname)
284 except:
284 except:
285 # On Windows, even though we close the file, we still can't
285 # On Windows, even though we close the file, we still can't
286 # delete it. I have no clue why
286 # delete it. I have no clue why
287 pass
287 pass
288
288
289 pair_fail_msg = ("Testing {0}\n\n"
289 pair_fail_msg = ("Testing {0}\n\n"
290 "In:\n"
290 "In:\n"
291 " {1!r}\n"
291 " {1!r}\n"
292 "Expected:\n"
292 "Expected:\n"
293 " {2!r}\n"
293 " {2!r}\n"
294 "Got:\n"
294 "Got:\n"
295 " {3!r}\n")
295 " {3!r}\n")
296 def check_pairs(func, pairs):
296 def check_pairs(func, pairs):
297 """Utility function for the common case of checking a function with a
297 """Utility function for the common case of checking a function with a
298 sequence of input/output pairs.
298 sequence of input/output pairs.
299
299
300 Parameters
300 Parameters
301 ----------
301 ----------
302 func : callable
302 func : callable
303 The function to be tested. Should accept a single argument.
303 The function to be tested. Should accept a single argument.
304 pairs : iterable
304 pairs : iterable
305 A list of (input, expected_output) tuples.
305 A list of (input, expected_output) tuples.
306
306
307 Returns
307 Returns
308 -------
308 -------
309 None. Raises an AssertionError if any output does not match the expected
309 None. Raises an AssertionError if any output does not match the expected
310 value.
310 value.
311 """
311 """
312 name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>"))
312 name = getattr(func, "func_name", getattr(func, "__name__", "<unknown>"))
313 for inp, expected in pairs:
313 for inp, expected in pairs:
314 out = func(inp)
314 out = func(inp)
315 assert out == expected, pair_fail_msg.format(name, inp, expected, out)
315 assert out == expected, pair_fail_msg.format(name, inp, expected, out)
316
316
317
317
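For illustration (hedged; str.upper is just a stand-in target and the import path is assumed), check_pairs is driven like this:

    from IPython.testing.tools import check_pairs  # assumed path

    def shout(s):
        return s.upper()

    # Each pair is (input, expected_output); any mismatch raises AssertionError
    # using the pair_fail_msg template defined above.
    check_pairs(shout, [("abc", "ABC"), ("x", "X"), ("", "")])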
318 if py3compat.PY3:
318 if py3compat.PY3:
319 MyStringIO = StringIO
319 MyStringIO = StringIO
320 else:
320 else:
321 # In Python 2, stdout/stderr can have either bytes or unicode written to them,
321 # In Python 2, stdout/stderr can have either bytes or unicode written to them,
322 # so we need a class that can handle both.
322 # so we need a class that can handle both.
323 class MyStringIO(StringIO):
323 class MyStringIO(StringIO):
324 def write(self, s):
324 def write(self, s):
325 s = py3compat.cast_unicode(s, encoding=getdefaultencoding())
325 s = py3compat.cast_unicode(s, encoding=py3compat.getdefaultencoding())
326 super(MyStringIO, self).write(s)
326 super(MyStringIO, self).write(s)
327
327
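To make the bytes/unicode point concrete, a brief Python 2 illustration (hedged; the import path is assumed and the bytes written must be representable in the default encoding):

    from IPython.testing.tools import MyStringIO  # assumed path

    buf = MyStringIO()
    buf.write(u'unicode text\n')   # unicode is passed through unchanged
    buf.write('plain bytes\n')     # bytes are decoded with the default encoding first
    assert buf.getvalue() == u'unicode text\nplain bytes\n'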
328 notprinted_msg = """Did not find {0!r} in printed output (on {1}):
328 notprinted_msg = """Did not find {0!r} in printed output (on {1}):
329 {2!r}"""
329 {2!r}"""
330
330
331 class AssertPrints(object):
331 class AssertPrints(object):
332 """Context manager for testing that code prints certain text.
332 """Context manager for testing that code prints certain text.
333
333
334 Examples
334 Examples
335 --------
335 --------
336 >>> with AssertPrints("abc", suppress=False):
336 >>> with AssertPrints("abc", suppress=False):
337 ... print "abcd"
337 ... print "abcd"
338 ... print "def"
338 ... print "def"
339 ...
339 ...
340 abcd
340 abcd
341 def
341 def
342 """
342 """
343 def __init__(self, s, channel='stdout', suppress=True):
343 def __init__(self, s, channel='stdout', suppress=True):
344 self.s = s
344 self.s = s
345 self.channel = channel
345 self.channel = channel
346 self.suppress = suppress
346 self.suppress = suppress
347
347
348 def __enter__(self):
348 def __enter__(self):
349 self.orig_stream = getattr(sys, self.channel)
349 self.orig_stream = getattr(sys, self.channel)
350 self.buffer = MyStringIO()
350 self.buffer = MyStringIO()
351 self.tee = Tee(self.buffer, channel=self.channel)
351 self.tee = Tee(self.buffer, channel=self.channel)
352 setattr(sys, self.channel, self.buffer if self.suppress else self.tee)
352 setattr(sys, self.channel, self.buffer if self.suppress else self.tee)
353
353
354 def __exit__(self, etype, value, traceback):
354 def __exit__(self, etype, value, traceback):
355 self.tee.flush()
355 self.tee.flush()
356 setattr(sys, self.channel, self.orig_stream)
356 setattr(sys, self.channel, self.orig_stream)
357 printed = self.buffer.getvalue()
357 printed = self.buffer.getvalue()
358 assert self.s in printed, notprinted_msg.format(self.s, self.channel, printed)
358 assert self.s in printed, notprinted_msg.format(self.s, self.channel, printed)
359 return False
359 return False
360
360
361 class AssertNotPrints(AssertPrints):
361 class AssertNotPrints(AssertPrints):
362 """Context manager for checking that certain output *isn't* produced.
362 """Context manager for checking that certain output *isn't* produced.
363
363
364 Counterpart of AssertPrints"""
364 Counterpart of AssertPrints"""
365 def __exit__(self, etype, value, traceback):
365 def __exit__(self, etype, value, traceback):
366 self.tee.flush()
366 self.tee.flush()
367 setattr(sys, self.channel, self.orig_stream)
367 setattr(sys, self.channel, self.orig_stream)
368 printed = self.buffer.getvalue()
368 printed = self.buffer.getvalue()
369 assert self.s not in printed, notprinted_msg.format(self.s, self.channel, printed)
369 assert self.s not in printed, notprinted_msg.format(self.s, self.channel, printed)
370 return False
370 return False
371
371
372 @contextmanager
372 @contextmanager
373 def mute_warn():
373 def mute_warn():
374 from IPython.utils import warn
374 from IPython.utils import warn
375 save_warn = warn.warn
375 save_warn = warn.warn
376 warn.warn = lambda *a, **kw: None
376 warn.warn = lambda *a, **kw: None
377 try:
377 try:
378 yield
378 yield
379 finally:
379 finally:
380 warn.warn = save_warn
380 warn.warn = save_warn
381
381
382 @contextmanager
382 @contextmanager
383 def make_tempfile(name):
383 def make_tempfile(name):
384 """ Create an empty, named, temporary file for the duration of the context.
384 """ Create an empty, named, temporary file for the duration of the context.
385 """
385 """
386 f = open(name, 'w')
386 f = open(name, 'w')
387 f.close()
387 f.close()
388 try:
388 try:
389 yield
389 yield
390 finally:
390 finally:
391 os.unlink(name)
391 os.unlink(name)
@@ -1,197 +1,197 b''
1 """Posix-specific implementation of process utilities.
1 """Posix-specific implementation of process utilities.
2
2
3 This file is only meant to be imported by process.py, not by end-users.
3 This file is only meant to be imported by process.py, not by end-users.
4 """
4 """
5
5
6 #-----------------------------------------------------------------------------
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2010-2011 The IPython Development Team
7 # Copyright (C) 2010-2011 The IPython Development Team
8 #
8 #
9 # Distributed under the terms of the BSD License. The full license is in
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
10 # the file COPYING, distributed as part of this software.
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 # Stdlib
18 # Stdlib
19 import subprocess as sp
19 import subprocess as sp
20 import sys
20 import sys
21
21
22 from IPython.external import pexpect
22 from IPython.external import pexpect
23
23
24 # Our own
24 # Our own
25 from .autoattr import auto_attr
25 from .autoattr import auto_attr
26 from ._process_common import getoutput, arg_split
26 from ._process_common import getoutput, arg_split
27 from IPython.utils import text
27 from IPython.utils import text
28 from IPython.utils import py3compat
28 from IPython.utils import py3compat
29
29
30 #-----------------------------------------------------------------------------
30 #-----------------------------------------------------------------------------
31 # Function definitions
31 # Function definitions
32 #-----------------------------------------------------------------------------
32 #-----------------------------------------------------------------------------
33
33
34 def _find_cmd(cmd):
34 def _find_cmd(cmd):
35 """Find the full path to a command using which."""
35 """Find the full path to a command using which."""
36
36
37 path = sp.Popen(['/usr/bin/env', 'which', cmd],
37 path = sp.Popen(['/usr/bin/env', 'which', cmd],
38 stdout=sp.PIPE).communicate()[0]
38 stdout=sp.PIPE).communicate()[0]
39 return py3compat.bytes_to_str(path)
39 return py3compat.bytes_to_str(path)
40
40
41
41
42 class ProcessHandler(object):
42 class ProcessHandler(object):
43 """Execute subprocesses under the control of pexpect.
43 """Execute subprocesses under the control of pexpect.
44 """
44 """
45 # Timeout in seconds to wait on each reading of the subprocess' output.
45 # Timeout in seconds to wait on each reading of the subprocess' output.
46 # This should not be set too low, to avoid cpu overuse on our side,
46 # This should not be set too low, to avoid cpu overuse on our side,
47 # since we read in a loop whose period is controlled by this timeout.
47 # since we read in a loop whose period is controlled by this timeout.
48 read_timeout = 0.05
48 read_timeout = 0.05
49
49
50 # Timeout to give a process if we receive SIGINT, between sending the
50 # Timeout to give a process if we receive SIGINT, between sending the
51 # SIGINT to the process and forcefully terminating it.
51 # SIGINT to the process and forcefully terminating it.
52 terminate_timeout = 0.2
52 terminate_timeout = 0.2
53
53
54 # File object where stdout and stderr of the subprocess will be written
54 # File object where stdout and stderr of the subprocess will be written
55 logfile = None
55 logfile = None
56
56
57 # Shell to call for subprocesses to execute
57 # Shell to call for subprocesses to execute
58 sh = None
58 sh = None
59
59
60 @auto_attr
60 @auto_attr
61 def sh(self):
61 def sh(self):
62 sh = pexpect.which('sh')
62 sh = pexpect.which('sh')
63 if sh is None:
63 if sh is None:
64 raise OSError('"sh" shell not found')
64 raise OSError('"sh" shell not found')
65 return sh
65 return sh
66
66
67 def __init__(self, logfile=None, read_timeout=None, terminate_timeout=None):
67 def __init__(self, logfile=None, read_timeout=None, terminate_timeout=None):
68 """Arguments are used for pexpect calls."""
68 """Arguments are used for pexpect calls."""
69 self.read_timeout = (ProcessHandler.read_timeout if read_timeout is
69 self.read_timeout = (ProcessHandler.read_timeout if read_timeout is
70 None else read_timeout)
70 None else read_timeout)
71 self.terminate_timeout = (ProcessHandler.terminate_timeout if
71 self.terminate_timeout = (ProcessHandler.terminate_timeout if
72 terminate_timeout is None else
72 terminate_timeout is None else
73 terminate_timeout)
73 terminate_timeout)
74 self.logfile = sys.stdout if logfile is None else logfile
74 self.logfile = sys.stdout if logfile is None else logfile
75
75
76 def getoutput(self, cmd):
76 def getoutput(self, cmd):
77 """Run a command and return its stdout/stderr as a string.
77 """Run a command and return its stdout/stderr as a string.
78
78
79 Parameters
79 Parameters
80 ----------
80 ----------
81 cmd : str
81 cmd : str
82 A command to be executed in the system shell.
82 A command to be executed in the system shell.
83
83
84 Returns
84 Returns
85 -------
85 -------
86 output : str
86 output : str
87 A string containing the combination of stdout and stderr from the
87 A string containing the combination of stdout and stderr from the
88 subprocess, in whatever order the subprocess originally wrote to its
88 subprocess, in whatever order the subprocess originally wrote to its
89 file descriptors (so the order of the information in this string is the
89 file descriptors (so the order of the information in this string is the
90 correct order as would be seen if running the command in a terminal).
90 correct order as would be seen if running the command in a terminal).
91 """
91 """
92 try:
92 try:
93 return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
93 return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
94 except KeyboardInterrupt:
94 except KeyboardInterrupt:
95 print('^C', file=sys.stderr, end='')
95 print('^C', file=sys.stderr, end='')
96
96
97 def getoutput_pexpect(self, cmd):
97 def getoutput_pexpect(self, cmd):
98 """Run a command and return its stdout/stderr as a string.
98 """Run a command and return its stdout/stderr as a string.
99
99
100 Parameters
100 Parameters
101 ----------
101 ----------
102 cmd : str
102 cmd : str
103 A command to be executed in the system shell.
103 A command to be executed in the system shell.
104
104
105 Returns
105 Returns
106 -------
106 -------
107 output : str
107 output : str
108 A string containing the combination of stdout and stderr from the
108 A string containing the combination of stdout and stderr from the
109 subprocess, in whatever order the subprocess originally wrote to its
109 subprocess, in whatever order the subprocess originally wrote to its
110 file descriptors (so the order of the information in this string is the
110 file descriptors (so the order of the information in this string is the
111 correct order as would be seen if running the command in a terminal).
111 correct order as would be seen if running the command in a terminal).
112 """
112 """
113 try:
113 try:
114 return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
114 return pexpect.run(self.sh, args=['-c', cmd]).replace('\r\n', '\n')
115 except KeyboardInterrupt:
115 except KeyboardInterrupt:
116 print('^C', file=sys.stderr, end='')
116 print('^C', file=sys.stderr, end='')
117
117
118 def system(self, cmd):
118 def system(self, cmd):
119 """Execute a command in a subshell.
119 """Execute a command in a subshell.
120
120
121 Parameters
121 Parameters
122 ----------
122 ----------
123 cmd : str
123 cmd : str
124 A command to be executed in the system shell.
124 A command to be executed in the system shell.
125
125
126 Returns
126 Returns
127 -------
127 -------
128 int : child's exitstatus
128 int : child's exitstatus
129 """
129 """
130 # Get likely encoding for the output.
130 # Get likely encoding for the output.
131 enc = text.getdefaultencoding()
131 enc = py3compat.getdefaultencoding()
132
132
133 # Patterns to match on the output, for pexpect. We read input and
133 # Patterns to match on the output, for pexpect. We read input and
134 # allow either a short timeout or EOF
134 # allow either a short timeout or EOF
135 patterns = [pexpect.TIMEOUT, pexpect.EOF]
135 patterns = [pexpect.TIMEOUT, pexpect.EOF]
136 # the index of the EOF pattern in the list.
136 # the index of the EOF pattern in the list.
137 # even though we know it's 1, this call means we don't have to worry if
137 # even though we know it's 1, this call means we don't have to worry if
138 # we change the above list, and forget to change this value:
138 # we change the above list, and forget to change this value:
139 EOF_index = patterns.index(pexpect.EOF)
139 EOF_index = patterns.index(pexpect.EOF)
140 # The size of the output stored so far in the process output buffer.
140 # The size of the output stored so far in the process output buffer.
141 # Since pexpect only appends to this buffer, each time we print we
141 # Since pexpect only appends to this buffer, each time we print we
142 # record how far we've printed, so that next time we only print *new*
142 # record how far we've printed, so that next time we only print *new*
143 # content from the buffer.
143 # content from the buffer.
144 out_size = 0
144 out_size = 0
145 try:
145 try:
146 # Since we're not really searching the buffer for text patterns, we
146 # Since we're not really searching the buffer for text patterns, we
147 # can set pexpect's search window to be tiny and it won't matter.
147 # can set pexpect's search window to be tiny and it won't matter.
148 # We only search for the 'patterns' timeout or EOF, which aren't in
148 # We only search for the 'patterns' timeout or EOF, which aren't in
149 # the text itself.
149 # the text itself.
150 #child = pexpect.spawn(pcmd, searchwindowsize=1)
150 #child = pexpect.spawn(pcmd, searchwindowsize=1)
151 if hasattr(pexpect, 'spawnb'):
151 if hasattr(pexpect, 'spawnb'):
152 child = pexpect.spawnb(self.sh, args=['-c', cmd]) # Pexpect-U
152 child = pexpect.spawnb(self.sh, args=['-c', cmd]) # Pexpect-U
153 else:
153 else:
154 child = pexpect.spawn(self.sh, args=['-c', cmd]) # Vanilla Pexpect
154 child = pexpect.spawn(self.sh, args=['-c', cmd]) # Vanilla Pexpect
155 flush = sys.stdout.flush
155 flush = sys.stdout.flush
156 while True:
156 while True:
157 # res is the index of the pattern that caused the match, so we
157 # res is the index of the pattern that caused the match, so we
158 # know whether we've finished (if we matched EOF) or not
158 # know whether we've finished (if we matched EOF) or not
159 res_idx = child.expect_list(patterns, self.read_timeout)
159 res_idx = child.expect_list(patterns, self.read_timeout)
160 print(child.before[out_size:].decode(enc, 'replace'), end='')
160 print(child.before[out_size:].decode(enc, 'replace'), end='')
161 flush()
161 flush()
162 if res_idx==EOF_index:
162 if res_idx==EOF_index:
163 break
163 break
164 # Update the pointer to what we've already printed
164 # Update the pointer to what we've already printed
165 out_size = len(child.before)
165 out_size = len(child.before)
166 except KeyboardInterrupt:
166 except KeyboardInterrupt:
167 # We need to send ^C to the process. The ascii code for '^C' is 3
167 # We need to send ^C to the process. The ascii code for '^C' is 3
168 # (the character is known as ETX for 'End of Text', see
168 # (the character is known as ETX for 'End of Text', see
169 # curses.ascii.ETX).
169 # curses.ascii.ETX).
170 child.sendline(chr(3))
170 child.sendline(chr(3))
171 # Read and print any more output the program might produce on its
171 # Read and print any more output the program might produce on its
172 # way out.
172 # way out.
173 try:
173 try:
174 out_size = len(child.before)
174 out_size = len(child.before)
175 child.expect_list(patterns, self.terminate_timeout)
175 child.expect_list(patterns, self.terminate_timeout)
176 print(child.before[out_size:].decode(enc, 'replace'), end='')
176 print(child.before[out_size:].decode(enc, 'replace'), end='')
177 sys.stdout.flush()
177 sys.stdout.flush()
178 except KeyboardInterrupt:
178 except KeyboardInterrupt:
179 # Impatient users tend to type it multiple times
179 # Impatient users tend to type it multiple times
180 pass
180 pass
181 finally:
181 finally:
182 # Ensure the subprocess really is terminated
182 # Ensure the subprocess really is terminated
183 child.terminate(force=True)
183 child.terminate(force=True)
184 # add isalive check, to ensure exitstatus is set:
184 # add isalive check, to ensure exitstatus is set:
185 child.isalive()
185 child.isalive()
186 return child.exitstatus
186 return child.exitstatus
187
187
188
188
189 # Make system() with a functional interface for outside use. Note that we use
189 # Make system() with a functional interface for outside use. Note that we use
190 # getoutput() from the _common utils, which is built on top of popen(). Using
190 # getoutput() from the _common utils, which is built on top of popen(). Using
191 # pexpect to get subprocess output produces output that is difficult to parse, since
191 # pexpect to get subprocess output produces output that is difficult to parse, since
192 # programs think they are talking to a tty and produce highly formatted output
192 # programs think they are talking to a tty and produce highly formatted output
193 # (ls is a good example), which is hard to handle programmatically.
193 # (ls is a good example), which is hard to handle programmatically.
194 system = ProcessHandler().system
194 system = ProcessHandler().system
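A brief, hedged usage sketch of the functional interface exported above (import shown through the public IPython.utils.process module, which is assumed to dispatch to this implementation on POSIX):

    from IPython.utils.process import system  # public entry point (assumed)

    # Runs the command in a subshell under pexpect control, streaming its output
    # to sys.stdout as it arrives; on POSIX the return value is the child's
    # exit status.
    status = system('echo hello && sleep 1 && echo done')
    print(status)  # 0 on success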
195
195
196
196
197
197
@@ -1,184 +1,184 b''
1 """Windows-specific implementation of process utilities.
1 """Windows-specific implementation of process utilities.
2
2
3 This file is only meant to be imported by process.py, not by end-users.
3 This file is only meant to be imported by process.py, not by end-users.
4 """
4 """
5
5
6 #-----------------------------------------------------------------------------
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2010-2011 The IPython Development Team
7 # Copyright (C) 2010-2011 The IPython Development Team
8 #
8 #
9 # Distributed under the terms of the BSD License. The full license is in
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
10 # the file COPYING, distributed as part of this software.
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16 from __future__ import print_function
16 from __future__ import print_function
17
17
18 # stdlib
18 # stdlib
19 import os
19 import os
20 import sys
20 import sys
21 import ctypes
21 import ctypes
22 import msvcrt
22 import msvcrt
23
23
24 from ctypes import c_int, POINTER
24 from ctypes import c_int, POINTER
25 from ctypes.wintypes import LPCWSTR, HLOCAL
25 from ctypes.wintypes import LPCWSTR, HLOCAL
26 from subprocess import STDOUT
26 from subprocess import STDOUT
27
27
28 # our own imports
28 # our own imports
29 from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split
29 from ._process_common import read_no_interrupt, process_handler, arg_split as py_arg_split
30 from . import py3compat
30 from . import py3compat
31 from . import text
31 from . import text
32
32
33 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
34 # Function definitions
34 # Function definitions
35 #-----------------------------------------------------------------------------
35 #-----------------------------------------------------------------------------
36
36
37 class AvoidUNCPath(object):
37 class AvoidUNCPath(object):
38 """A context manager to protect command execution from UNC paths.
38 """A context manager to protect command execution from UNC paths.
39
39
40 In the Win32 API, commands can't be invoked with the cwd being a UNC path.
40 In the Win32 API, commands can't be invoked with the cwd being a UNC path.
41 This context manager temporarily changes directory to the 'C:' drive on
41 This context manager temporarily changes directory to the 'C:' drive on
42 entering, and restores the original working directory on exit.
42 entering, and restores the original working directory on exit.
43
43
44 The context manager returns the starting working directory *if* it made a
44 The context manager returns the starting working directory *if* it made a
45 change and None otherwise, so that users can apply the necessary adjustment
45 change and None otherwise, so that users can apply the necessary adjustment
46 to their system calls in the event of a change.
46 to their system calls in the event of a change.
47
47
48 Example
48 Example
49 -------
49 -------
50 ::
50 ::
51 cmd = 'dir'
51 cmd = 'dir'
52 with AvoidUNCPath() as path:
52 with AvoidUNCPath() as path:
53 if path is not None:
53 if path is not None:
54 cmd = '"pushd %s &&"%s' % (path, cmd)
54 cmd = '"pushd %s &&"%s' % (path, cmd)
55 os.system(cmd)
55 os.system(cmd)
56 """
56 """
57 def __enter__(self):
57 def __enter__(self):
58 self.path = os.getcwdu()
58 self.path = os.getcwdu()
59 self.is_unc_path = self.path.startswith(r"\\")
59 self.is_unc_path = self.path.startswith(r"\\")
60 if self.is_unc_path:
60 if self.is_unc_path:
61 # change to c drive (as cmd.exe cannot handle UNC addresses)
61 # change to c drive (as cmd.exe cannot handle UNC addresses)
62 os.chdir("C:")
62 os.chdir("C:")
63 return self.path
63 return self.path
64 else:
64 else:
65 # We return None to signal that there was no change in the working
65 # We return None to signal that there was no change in the working
66 # directory
66 # directory
67 return None
67 return None
68
68
69 def __exit__(self, exc_type, exc_value, traceback):
69 def __exit__(self, exc_type, exc_value, traceback):
70 if self.is_unc_path:
70 if self.is_unc_path:
71 os.chdir(self.path)
71 os.chdir(self.path)
72
72
73
73
74 def _find_cmd(cmd):
74 def _find_cmd(cmd):
75 """Find the full path to a .bat or .exe using the win32api module."""
75 """Find the full path to a .bat or .exe using the win32api module."""
76 try:
76 try:
77 from win32api import SearchPath
77 from win32api import SearchPath
78 except ImportError:
78 except ImportError:
79 raise ImportError('you need to have pywin32 installed for this to work')
79 raise ImportError('you need to have pywin32 installed for this to work')
80 else:
80 else:
81 PATH = os.environ['PATH']
81 PATH = os.environ['PATH']
82 extensions = ['.exe', '.com', '.bat', '.py']
82 extensions = ['.exe', '.com', '.bat', '.py']
83 path = None
83 path = None
84 for ext in extensions:
84 for ext in extensions:
85 try:
85 try:
86 path = SearchPath(PATH, cmd + ext)[0]
86 path = SearchPath(PATH, cmd + ext)[0]
87 except:
87 except:
88 pass
88 pass
89 if path is None:
89 if path is None:
90 raise OSError("command %r not found" % cmd)
90 raise OSError("command %r not found" % cmd)
91 else:
91 else:
92 return path
92 return path
93
93
94
94
95 def _system_body(p):
95 def _system_body(p):
96 """Callback for _system."""
96 """Callback for _system."""
97 enc = text.getdefaultencoding()
97 enc = py3compat.getdefaultencoding()
98 for line in read_no_interrupt(p.stdout).splitlines():
98 for line in read_no_interrupt(p.stdout).splitlines():
99 line = line.decode(enc, 'replace')
99 line = line.decode(enc, 'replace')
100 print(line, file=sys.stdout)
100 print(line, file=sys.stdout)
101 for line in read_no_interrupt(p.stderr).splitlines():
101 for line in read_no_interrupt(p.stderr).splitlines():
102 line = line.decode(enc, 'replace')
102 line = line.decode(enc, 'replace')
103 print(line, file=sys.stderr)
103 print(line, file=sys.stderr)
104
104
105 # Wait to finish for returncode
105 # Wait to finish for returncode
106 return p.wait()
106 return p.wait()
107
107
108
108
109 def system(cmd):
109 def system(cmd):
110 """Win32 version of os.system() that works with network shares.
110 """Win32 version of os.system() that works with network shares.
111
111
112 Note that this implementation returns None, as meant for use in IPython.
112 Note that this implementation returns None, as meant for use in IPython.
113
113
114 Parameters
114 Parameters
115 ----------
115 ----------
116 cmd : str
116 cmd : str
117 A command to be executed in the system shell.
117 A command to be executed in the system shell.
118
118
119 Returns
119 Returns
120 -------
120 -------
121 None : we explicitly do NOT return the subprocess status code, as this
121 None : we explicitly do NOT return the subprocess status code, as this
122 utility is meant to be used extensively in IPython, where any return value
122 utility is meant to be used extensively in IPython, where any return value
123 would trigger :func:`sys.displayhook` calls.
123 would trigger :func:`sys.displayhook` calls.
124 """
124 """
125 # The controller provides interactivity with both
125 # The controller provides interactivity with both
126 # stdin and stdout
126 # stdin and stdout
127 import _process_win32_controller
127 import _process_win32_controller
128 _process_win32_controller.system(cmd)
128 _process_win32_controller.system(cmd)
129
129
130
130
131 def getoutput(cmd):
131 def getoutput(cmd):
132 """Return standard output of executing cmd in a shell.
132 """Return standard output of executing cmd in a shell.
133
133
134 Accepts the same arguments as os.system().
134 Accepts the same arguments as os.system().
135
135
136 Parameters
136 Parameters
137 ----------
137 ----------
138 cmd : str
138 cmd : str
139 A command to be executed in the system shell.
139 A command to be executed in the system shell.
140
140
141 Returns
141 Returns
142 -------
142 -------
143 stdout : str
143 stdout : str
144 """
144 """
145
145
146 with AvoidUNCPath() as path:
146 with AvoidUNCPath() as path:
147 if path is not None:
147 if path is not None:
148 cmd = '"pushd %s &&"%s' % (path, cmd)
148 cmd = '"pushd %s &&"%s' % (path, cmd)
149 out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)
149 out = process_handler(cmd, lambda p: p.communicate()[0], STDOUT)
150
150
151 if out is None:
151 if out is None:
152 out = ''
152 out = ''
153 return out
153 return out
154
154
155 try:
155 try:
156 CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
156 CommandLineToArgvW = ctypes.windll.shell32.CommandLineToArgvW
157 CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]
157 CommandLineToArgvW.arg_types = [LPCWSTR, POINTER(c_int)]
158 CommandLineToArgvW.res_types = [POINTER(LPCWSTR)]
158 CommandLineToArgvW.res_types = [POINTER(LPCWSTR)]
159 LocalFree = ctypes.windll.kernel32.LocalFree
159 LocalFree = ctypes.windll.kernel32.LocalFree
160 LocalFree.res_type = HLOCAL
160 LocalFree.res_type = HLOCAL
161 LocalFree.arg_types = [HLOCAL]
161 LocalFree.arg_types = [HLOCAL]
162
162
163 def arg_split(commandline, posix=False, strict=True):
163 def arg_split(commandline, posix=False, strict=True):
164 """Split a command line's arguments in a shell-like manner.
164 """Split a command line's arguments in a shell-like manner.
165
165
166 This is a special version for Windows that uses a ctypes call to CommandLineToArgvW
166 This is a special version for Windows that uses a ctypes call to CommandLineToArgvW
167 to do the argv splitting. The posix parameter is ignored.
167 to do the argv splitting. The posix parameter is ignored.
168
168
169 If strict=False, process_common.arg_split(...strict=False) is used instead.
169 If strict=False, process_common.arg_split(...strict=False) is used instead.
170 """
170 """
171 # CommandLineToArgvW returns the path to the executable if called with an empty string.
171 # CommandLineToArgvW returns the path to the executable if called with an empty string.
172 if commandline.strip() == "":
172 if commandline.strip() == "":
173 return []
173 return []
174 if not strict:
174 if not strict:
175 # not really a command-line arg; fall back on _process_common
175 # not really a command-line arg; fall back on _process_common
176 return py_arg_split(commandline, posix=posix, strict=strict)
176 return py_arg_split(commandline, posix=posix, strict=strict)
177 argvn = c_int()
177 argvn = c_int()
178 result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))
178 result_pointer = CommandLineToArgvW(py3compat.cast_unicode(commandline.lstrip()), ctypes.byref(argvn))
179 result_array_type = LPCWSTR * argvn.value
179 result_array_type = LPCWSTR * argvn.value
180 result = [arg for arg in result_array_type.from_address(result_pointer)]
180 result = [arg for arg in result_array_type.from_address(result_pointer)]
181 retval = LocalFree(result_pointer)
181 retval = LocalFree(result_pointer)
182 return result
182 return result
183 except AttributeError:
183 except AttributeError:
184 arg_split = py_arg_split
184 arg_split = py_arg_split
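As a hedged illustration of what this ctypes-backed splitter returns (Windows only; the import path is assumed, and in practice the function is normally reached via IPython.utils.process rather than imported directly):

    from IPython.utils._process_win32 import arg_split  # assumed path, Windows only

    # Double quotes group words into one argument, following CommandLineToArgvW's rules.
    print(arg_split('ls -l "Program Files"'))   # roughly: [u'ls', u'-l', u'Program Files']

    # The empty string is special-cased above to return [] instead of the executable path.
    print(arg_split(''))                        # []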
@@ -1,322 +1,323 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """
2 """
3 IO related utilities.
3 IO related utilities.
4 """
4 """
5
5
6 #-----------------------------------------------------------------------------
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2008-2011 The IPython Development Team
7 # Copyright (C) 2008-2011 The IPython Development Team
8 #
8 #
9 # Distributed under the terms of the BSD License. The full license is in
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
10 # the file COPYING, distributed as part of this software.
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12 from __future__ import print_function
12 from __future__ import print_function
13
13
14 #-----------------------------------------------------------------------------
14 #-----------------------------------------------------------------------------
15 # Imports
15 # Imports
16 #-----------------------------------------------------------------------------
16 #-----------------------------------------------------------------------------
17 import os
17 import sys
18 import sys
18 import tempfile
19 import tempfile
19
20
20 #-----------------------------------------------------------------------------
21 #-----------------------------------------------------------------------------
21 # Code
22 # Code
22 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
23
24
24
25
25 class IOStream:
26 class IOStream:
26
27
27 def __init__(self,stream, fallback=None):
28 def __init__(self,stream, fallback=None):
28 if not hasattr(stream,'write') or not hasattr(stream,'flush'):
29 if not hasattr(stream,'write') or not hasattr(stream,'flush'):
29 if fallback is not None:
30 if fallback is not None:
30 stream = fallback
31 stream = fallback
31 else:
32 else:
32 raise ValueError("fallback required, but not specified")
33 raise ValueError("fallback required, but not specified")
33 self.stream = stream
34 self.stream = stream
34 self._swrite = stream.write
35 self._swrite = stream.write
35
36
36 # clone all methods not overridden:
37 # clone all methods not overridden:
37 def clone(meth):
38 def clone(meth):
38 return not hasattr(self, meth) and not meth.startswith('_')
39 return not hasattr(self, meth) and not meth.startswith('_')
39 for meth in filter(clone, dir(stream)):
40 for meth in filter(clone, dir(stream)):
40 setattr(self, meth, getattr(stream, meth))
41 setattr(self, meth, getattr(stream, meth))
41
42
42 def write(self,data):
43 def write(self,data):
43 try:
44 try:
44 self._swrite(data)
45 self._swrite(data)
45 except:
46 except:
46 try:
47 try:
47 # print handles some unicode issues which may trip a plain
48 # print handles some unicode issues which may trip a plain
48 # write() call. Emulate write() by using an empty end
49 # write() call. Emulate write() by using an empty end
49 # argument.
50 # argument.
50 print(data, end='', file=self.stream)
51 print(data, end='', file=self.stream)
51 except:
52 except:
52 # if we get here, something is seriously broken.
53 # if we get here, something is seriously broken.
53 print('ERROR - failed to write data to stream:', self.stream,
54 print('ERROR - failed to write data to stream:', self.stream,
54 file=sys.stderr)
55 file=sys.stderr)
55
56
56 def writelines(self, lines):
57 def writelines(self, lines):
57 if isinstance(lines, basestring):
58 if isinstance(lines, basestring):
58 lines = [lines]
59 lines = [lines]
59 for line in lines:
60 for line in lines:
60 self.write(line)
61 self.write(line)
61
62
62 # This class used to have a writeln method, but regular files and streams
63 # This class used to have a writeln method, but regular files and streams
63 # in Python don't have this method. We need to keep this class fully
64 # in Python don't have this method. We need to keep this class fully
64 # compatible with them, so the method was removed.
65 # compatible with them, so the method was removed.
65
66
66 @property
67 @property
67 def closed(self):
68 def closed(self):
68 return self.stream.closed
69 return self.stream.closed
69
70
70 def close(self):
71 def close(self):
71 pass
72 pass
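An illustrative sketch of IOStream (StringIO stands in for any object with write()/flush(); the fallback is only consulted when the wrapped object lacks those methods)::

    import sys
    from StringIO import StringIO

    buf = IOStream(StringIO())                        # valid stream, wrapped as-is
    buf.write(u'hello')                               # delegates to the stream's write()
    safe = IOStream(None, fallback=sys.__stdout__)    # no write/flush -> fallback used
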
72
73
73 # setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr
74 # setup stdin/stdout/stderr to sys.stdin/sys.stdout/sys.stderr
74 devnull = open(os.devnull, 'a')
75 devnull = open(os.devnull, 'a')
75 stdin = IOStream(sys.stdin, fallback=devnull)
76 stdin = IOStream(sys.stdin, fallback=devnull)
76 stdout = IOStream(sys.stdout, fallback=devnull)
77 stdout = IOStream(sys.stdout, fallback=devnull)
77 stderr = IOStream(sys.stderr, fallback=devnull)
78 stderr = IOStream(sys.stderr, fallback=devnull)
78
79
79 class IOTerm:
80 class IOTerm:
80 """ Term holds the file or file-like objects for handling I/O operations.
81 """ Term holds the file or file-like objects for handling I/O operations.
81
82
82 These are normally just sys.stdin, sys.stdout and sys.stderr but for
83 These are normally just sys.stdin, sys.stdout and sys.stderr but for
83 Windows they can be replaced to allow editing the strings before they are
84 Windows they can be replaced to allow editing the strings before they are
84 displayed."""
85 displayed."""
85
86
86 # In the future, having IPython channel all its I/O operations through
87 # In the future, having IPython channel all its I/O operations through
87 # this class will make it easier to embed it into other environments which
88 # this class will make it easier to embed it into other environments which
88 # are not a normal terminal (such as a GUI-based shell)
89 # are not a normal terminal (such as a GUI-based shell)
89 def __init__(self, stdin=None, stdout=None, stderr=None):
90 def __init__(self, stdin=None, stdout=None, stderr=None):
90 mymodule = sys.modules[__name__]
91 mymodule = sys.modules[__name__]
91 self.stdin = IOStream(stdin, mymodule.stdin)
92 self.stdin = IOStream(stdin, mymodule.stdin)
92 self.stdout = IOStream(stdout, mymodule.stdout)
93 self.stdout = IOStream(stdout, mymodule.stdout)
93 self.stderr = IOStream(stderr, mymodule.stderr)
94 self.stderr = IOStream(stderr, mymodule.stderr)
94
95
95
96
96 class Tee(object):
97 class Tee(object):
97 """A class to duplicate an output stream to stdout/err.
98 """A class to duplicate an output stream to stdout/err.
98
99
99 This works in a manner very similar to the Unix 'tee' command.
100 This works in a manner very similar to the Unix 'tee' command.
100
101
101 When the object is closed or deleted, it closes the original file given to
102 When the object is closed or deleted, it closes the original file given to
102 it for duplication.
103 it for duplication.
103 """
104 """
104 # Inspired by:
105 # Inspired by:
105 # http://mail.python.org/pipermail/python-list/2007-May/442737.html
106 # http://mail.python.org/pipermail/python-list/2007-May/442737.html
106
107
107 def __init__(self, file_or_name, mode="w", channel='stdout'):
108 def __init__(self, file_or_name, mode="w", channel='stdout'):
108 """Construct a new Tee object.
109 """Construct a new Tee object.
109
110
110 Parameters
111 Parameters
111 ----------
112 ----------
112 file_or_name : filename or open filehandle (writable)
113 file_or_name : filename or open filehandle (writable)
113 File that will be duplicated
114 File that will be duplicated
114
115
115 mode : optional, valid mode for open().
116 mode : optional, valid mode for open().
116 If a filename was given, open with this mode.
117 If a filename was given, open with this mode.
117
118
118 channel : str, one of ['stdout', 'stderr']
119 channel : str, one of ['stdout', 'stderr']
119 """
120 """
120 if channel not in ['stdout', 'stderr']:
121 if channel not in ['stdout', 'stderr']:
121 raise ValueError('Invalid channel spec %s' % channel)
122 raise ValueError('Invalid channel spec %s' % channel)
122
123
123 if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'):
124 if hasattr(file_or_name, 'write') and hasattr(file_or_name, 'seek'):
124 self.file = file_or_name
125 self.file = file_or_name
125 else:
126 else:
126 self.file = open(file_or_name, mode)
127 self.file = open(file_or_name, mode)
127 self.channel = channel
128 self.channel = channel
128 self.ostream = getattr(sys, channel)
129 self.ostream = getattr(sys, channel)
129 setattr(sys, channel, self)
130 setattr(sys, channel, self)
130 self._closed = False
131 self._closed = False
131
132
132 def close(self):
133 def close(self):
133 """Close the file and restore the channel."""
134 """Close the file and restore the channel."""
134 self.flush()
135 self.flush()
135 setattr(sys, self.channel, self.ostream)
136 setattr(sys, self.channel, self.ostream)
136 self.file.close()
137 self.file.close()
137 self._closed = True
138 self._closed = True
138
139
139 def write(self, data):
140 def write(self, data):
140 """Write data to both channels."""
141 """Write data to both channels."""
141 self.file.write(data)
142 self.file.write(data)
142 self.ostream.write(data)
143 self.ostream.write(data)
143 self.ostream.flush()
144 self.ostream.flush()
144
145
145 def flush(self):
146 def flush(self):
146 """Flush both channels."""
147 """Flush both channels."""
147 self.file.flush()
148 self.file.flush()
148 self.ostream.flush()
149 self.ostream.flush()
149
150
150 def __del__(self):
151 def __del__(self):
151 if not self._closed:
152 if not self._closed:
152 self.close()
153 self.close()
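A short usage sketch for Tee (the log file name is arbitrary); while the Tee is active, writes to stdout land both on screen and in the file::

    tee = Tee('session.log', mode='w', channel='stdout')
    print('recorded on screen and in session.log')
    tee.close()    # flushes, restores sys.stdout and closes the file
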
153
154
154
155
155 def file_read(filename):
156 def file_read(filename):
156 """Read a file and close it. Returns the file source."""
157 """Read a file and close it. Returns the file source."""
157 fobj = open(filename,'r');
158 fobj = open(filename,'r');
158 source = fobj.read();
159 source = fobj.read();
159 fobj.close()
160 fobj.close()
160 return source
161 return source
161
162
162
163
163 def file_readlines(filename):
164 def file_readlines(filename):
164 """Read a file and close it. Returns the file source using readlines()."""
165 """Read a file and close it. Returns the file source using readlines()."""
165 fobj = open(filename,'r');
166 fobj = open(filename,'r');
166 lines = fobj.readlines();
167 lines = fobj.readlines();
167 fobj.close()
168 fobj.close()
168 return lines
169 return lines
169
170
170
171
171 def raw_input_multi(header='', ps1='==> ', ps2='..> ',terminate_str = '.'):
172 def raw_input_multi(header='', ps1='==> ', ps2='..> ',terminate_str = '.'):
172 """Take multiple lines of input.
173 """Take multiple lines of input.
173
174
174 A list with each line of input as a separate element is returned when a
175 A list with each line of input as a separate element is returned when a
175 termination string is entered (defaults to a single '.'). Input can also
176 termination string is entered (defaults to a single '.'). Input can also
176 terminate via EOF (^D in Unix, ^Z-RET in Windows).
177 terminate via EOF (^D in Unix, ^Z-RET in Windows).
177
178
178 Lines of input which end in \\ are joined into single entries (and a
179 Lines of input which end in \\ are joined into single entries (and a
179 secondary continuation prompt is issued as long as the user terminates
180 secondary continuation prompt is issued as long as the user terminates
180 lines with \\). This allows entering very long strings which are still
181 lines with \\). This allows entering very long strings which are still
181 meant to be treated as single entities.
182 meant to be treated as single entities.
182 """
183 """
183
184
184 try:
185 try:
185 if header:
186 if header:
186 header += '\n'
187 header += '\n'
187 lines = [raw_input(header + ps1)]
188 lines = [raw_input(header + ps1)]
188 except EOFError:
189 except EOFError:
189 return []
190 return []
190 terminate = [terminate_str]
191 terminate = [terminate_str]
191 try:
192 try:
192 while lines[-1:] != terminate:
193 while lines[-1:] != terminate:
193 new_line = raw_input(ps1)
194 new_line = raw_input(ps1)
194 while new_line.endswith('\\'):
195 while new_line.endswith('\\'):
195 new_line = new_line[:-1] + raw_input(ps2)
196 new_line = new_line[:-1] + raw_input(ps2)
196 lines.append(new_line)
197 lines.append(new_line)
197
198
198 return lines[:-1] # don't return the termination command
199 return lines[:-1] # don't return the termination command
199 except EOFError:
200 except EOFError:
200 print()
201 print()
201 return lines
202 return lines
202
203
203
204
204 def raw_input_ext(prompt='', ps2='... '):
205 def raw_input_ext(prompt='', ps2='... '):
205 """Similar to raw_input(), but accepts extended lines if input ends with \\."""
206 """Similar to raw_input(), but accepts extended lines if input ends with \\."""
206
207
207 line = raw_input(prompt)
208 line = raw_input(prompt)
208 while line.endswith('\\'):
209 while line.endswith('\\'):
209 line = line[:-1] + raw_input(ps2)
210 line = line[:-1] + raw_input(ps2)
210 return line
211 return line
211
212
212
213
213 def ask_yes_no(prompt,default=None):
214 def ask_yes_no(prompt,default=None):
214 """Asks a question and returns a boolean (y/n) answer.
215 """Asks a question and returns a boolean (y/n) answer.
215
216
216 If default is given (one of 'y','n'), it is used if the user input is
217 If default is given (one of 'y','n'), it is used if the user input is
217 empty. Otherwise the question is repeated until an answer is given.
218 empty. Otherwise the question is repeated until an answer is given.
218
219
219 An EOF is treated as the default answer. If there is no default, an
220 An EOF is treated as the default answer. If there is no default, an
220 exception is raised to prevent infinite loops.
221 exception is raised to prevent infinite loops.
221
222
222 Valid answers are: y/yes/n/no (match is not case sensitive)."""
223 Valid answers are: y/yes/n/no (match is not case sensitive)."""
223
224
224 answers = {'y':True,'n':False,'yes':True,'no':False}
225 answers = {'y':True,'n':False,'yes':True,'no':False}
225 ans = None
226 ans = None
226 while ans not in answers.keys():
227 while ans not in answers.keys():
227 try:
228 try:
228 ans = raw_input(prompt+' ').lower()
229 ans = raw_input(prompt+' ').lower()
229 if not ans: # response was an empty string
230 if not ans: # response was an empty string
230 ans = default
231 ans = default
231 except KeyboardInterrupt:
232 except KeyboardInterrupt:
232 pass
233 pass
233 except EOFError:
234 except EOFError:
234 if default in answers.keys():
235 if default in answers.keys():
235 ans = default
236 ans = default
236 print()
237 print()
237 else:
238 else:
238 raise
239 raise
239
240
240 return answers[ans]
241 return answers[ans]
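An interactive sketch, illustrative only (prompt text and default are arbitrary)::

    if ask_yes_no('Overwrite the existing profile?', default='n'):
        print('overwriting')
    else:
        print('keeping the old profile')
    # empty input or EOF picks the 'n' default; with no default,
    # EOF re-raises so the loop cannot spin forever
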
241
242
242
243
243 class NLprinter:
244 class NLprinter:
244 """Print an arbitrarily nested list, indicating index numbers.
245 """Print an arbitrarily nested list, indicating index numbers.
245
246
246 An instance of this class called nlprint is available and callable as a
247 An instance of this class called nlprint is available and callable as a
247 function.
248 function.
248
249
249 nlprint(list,indent=' ',sep=': ') -> prints indenting each level by 'indent'
250 nlprint(list,indent=' ',sep=': ') -> prints indenting each level by 'indent'
250 and using 'sep' to separate the index from the value. """
251 and using 'sep' to separate the index from the value. """
251
252
252 def __init__(self):
253 def __init__(self):
253 self.depth = 0
254 self.depth = 0
254
255
255 def __call__(self,lst,pos='',**kw):
256 def __call__(self,lst,pos='',**kw):
256 """Prints the nested list numbering levels."""
257 """Prints the nested list numbering levels."""
257 kw.setdefault('indent',' ')
258 kw.setdefault('indent',' ')
258 kw.setdefault('sep',': ')
259 kw.setdefault('sep',': ')
259 kw.setdefault('start',0)
260 kw.setdefault('start',0)
260 kw.setdefault('stop',len(lst))
261 kw.setdefault('stop',len(lst))
261 # we need to remove start and stop from kw so they don't propagate
262 # we need to remove start and stop from kw so they don't propagate
262 # into a recursive call for a nested list.
263 # into a recursive call for a nested list.
263 start = kw['start']; del kw['start']
264 start = kw['start']; del kw['start']
264 stop = kw['stop']; del kw['stop']
265 stop = kw['stop']; del kw['stop']
265 if self.depth == 0 and 'header' in kw.keys():
266 if self.depth == 0 and 'header' in kw.keys():
266 print(kw['header'])
267 print(kw['header'])
267
268
268 for idx in range(start,stop):
269 for idx in range(start,stop):
269 elem = lst[idx]
270 elem = lst[idx]
270 newpos = pos + str(idx)
271 newpos = pos + str(idx)
271 if type(elem)==type([]):
272 if type(elem)==type([]):
272 self.depth += 1
273 self.depth += 1
273 self.__call__(elem, newpos+",", **kw)
274 self.__call__(elem, newpos+",", **kw)
274 self.depth -= 1
275 self.depth -= 1
275 else:
276 else:
276 print(kw['indent']*self.depth + newpos + kw["sep"] + repr(elem))
277 print(kw['indent']*self.depth + newpos + kw["sep"] + repr(elem))
277
278
278 nlprint = NLprinter()
279 nlprint = NLprinter()
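An illustrative call to nlprint, with the output shown as comments (default indent and separator)::

    nlprint(['a', ['b', 'c'], 'd'], header='demo')
    # demo
    # 0: 'a'
    #  1,0: 'b'
    #  1,1: 'c'
    # 2: 'd'
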
279
280
280
281
281 def temp_pyfile(src, ext='.py'):
282 def temp_pyfile(src, ext='.py'):
282 """Make a temporary python file, return filename and filehandle.
283 """Make a temporary python file, return filename and filehandle.
283
284
284 Parameters
285 Parameters
285 ----------
286 ----------
286 src : string or list of strings (no need for ending newlines if list)
287 src : string or list of strings (no need for ending newlines if list)
287 Source code to be written to the file.
288 Source code to be written to the file.
288
289
289 ext : optional, string
290 ext : optional, string
290 Extension for the generated file.
291 Extension for the generated file.
291
292
292 Returns
293 Returns
293 -------
294 -------
294 (filename, open filehandle)
295 (filename, open filehandle)
295 It is the caller's responsibility to close the open file and unlink it.
296 It is the caller's responsibility to close the open file and unlink it.
296 """
297 """
297 fname = tempfile.mkstemp(ext)[1]
298 fname = tempfile.mkstemp(ext)[1]
298 f = open(fname,'w')
299 f = open(fname,'w')
299 f.write(src)
300 f.write(src)
300 f.flush()
301 f.flush()
301 return fname, f
302 return fname, f
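A small sketch of temp_pyfile; as the docstring says, closing and unlinking are the caller's job (Python 2 execfile is used here purely for the demo)::

    import os

    fname, fh = temp_pyfile("print('hello from a temporary script')\n")
    try:
        execfile(fname, {})
    finally:
        fh.close()
        os.unlink(fname)
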
302
303
303
304
304 def raw_print(*args, **kw):
305 def raw_print(*args, **kw):
305 """Raw print to sys.__stdout__, otherwise identical interface to print()."""
306 """Raw print to sys.__stdout__, otherwise identical interface to print()."""
306
307
307 print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
308 print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
308 file=sys.__stdout__)
309 file=sys.__stdout__)
309 sys.__stdout__.flush()
310 sys.__stdout__.flush()
310
311
311
312
312 def raw_print_err(*args, **kw):
313 def raw_print_err(*args, **kw):
313 """Raw print to sys.__stderr__, otherwise identical interface to print()."""
314 """Raw print to sys.__stderr__, otherwise identical interface to print()."""
314
315
315 print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
316 print(*args, sep=kw.get('sep', ' '), end=kw.get('end', '\n'),
316 file=sys.__stderr__)
317 file=sys.__stderr__)
317 sys.__stderr__.flush()
318 sys.__stderr__.flush()
318
319
319
320
320 # Short aliases for quick debugging, do NOT use these in production code.
321 # Short aliases for quick debugging, do NOT use these in production code.
321 rprint = raw_print
322 rprint = raw_print
322 rprinte = raw_print_err
323 rprinte = raw_print_err
@@ -1,165 +1,165 b''
1 """Utilities to manipulate JSON objects.
1 """Utilities to manipulate JSON objects.
2 """
2 """
3 #-----------------------------------------------------------------------------
3 #-----------------------------------------------------------------------------
4 # Copyright (C) 2010-2011 The IPython Development Team
4 # Copyright (C) 2010-2011 The IPython Development Team
5 #
5 #
6 # Distributed under the terms of the BSD License. The full license is in
6 # Distributed under the terms of the BSD License. The full license is in
7 # the file COPYING.txt, distributed as part of this software.
7 # the file COPYING.txt, distributed as part of this software.
8 #-----------------------------------------------------------------------------
8 #-----------------------------------------------------------------------------
9
9
10 #-----------------------------------------------------------------------------
10 #-----------------------------------------------------------------------------
11 # Imports
11 # Imports
12 #-----------------------------------------------------------------------------
12 #-----------------------------------------------------------------------------
13 # stdlib
13 # stdlib
14 import re
14 import re
15 import sys
15 import sys
16 import types
16 import types
17 from datetime import datetime
17 from datetime import datetime
18
18
19 from IPython.utils import py3compat
19 from IPython.utils import py3compat
20 from IPython.utils import text
20 from IPython.utils import text
21 next_attr_name = '__next__' if py3compat.PY3 else 'next'
21 next_attr_name = '__next__' if py3compat.PY3 else 'next'
22
22
23 #-----------------------------------------------------------------------------
23 #-----------------------------------------------------------------------------
24 # Globals and constants
24 # Globals and constants
25 #-----------------------------------------------------------------------------
25 #-----------------------------------------------------------------------------
26
26
27 # timestamp formats
27 # timestamp formats
28 ISO8601="%Y-%m-%dT%H:%M:%S.%f"
28 ISO8601="%Y-%m-%dT%H:%M:%S.%f"
29 ISO8601_PAT=re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+$")
29 ISO8601_PAT=re.compile(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+$")
30
30
31 #-----------------------------------------------------------------------------
31 #-----------------------------------------------------------------------------
32 # Classes and functions
32 # Classes and functions
33 #-----------------------------------------------------------------------------
33 #-----------------------------------------------------------------------------
34
34
35 def rekey(dikt):
35 def rekey(dikt):
36 """Rekey a dict that has been forced to use str keys where there should be
36 """Rekey a dict that has been forced to use str keys where there should be
37 ints by json."""
37 ints by json."""
38 for k in dikt.iterkeys():
38 for k in dikt.iterkeys():
39 if isinstance(k, basestring):
39 if isinstance(k, basestring):
40 ik=fk=None
40 ik=fk=None
41 try:
41 try:
42 ik = int(k)
42 ik = int(k)
43 except ValueError:
43 except ValueError:
44 try:
44 try:
45 fk = float(k)
45 fk = float(k)
46 except ValueError:
46 except ValueError:
47 continue
47 continue
48 if ik is not None:
48 if ik is not None:
49 nk = ik
49 nk = ik
50 else:
50 else:
51 nk = fk
51 nk = fk
52 if nk in dikt:
52 if nk in dikt:
53 raise KeyError("already have key %r"%nk)
53 raise KeyError("already have key %r"%nk)
54 dikt[nk] = dikt.pop(k)
54 dikt[nk] = dikt.pop(k)
55 return dikt
55 return dikt
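Illustrative only -- rekey undoes JSON's string-keying of numeric keys::

    rekey({'1': 'one', '2.5': 'two and a half', 'name': 'n'})
    # -> {1: 'one', 2.5: 'two and a half', 'name': 'n'}
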
56
56
57
57
58 def extract_dates(obj):
58 def extract_dates(obj):
59 """extract ISO8601 dates from unpacked JSON"""
59 """extract ISO8601 dates from unpacked JSON"""
60 if isinstance(obj, dict):
60 if isinstance(obj, dict):
61 obj = dict(obj) # don't clobber
61 obj = dict(obj) # don't clobber
62 for k,v in obj.iteritems():
62 for k,v in obj.iteritems():
63 obj[k] = extract_dates(v)
63 obj[k] = extract_dates(v)
64 elif isinstance(obj, (list, tuple)):
64 elif isinstance(obj, (list, tuple)):
65 obj = [ extract_dates(o) for o in obj ]
65 obj = [ extract_dates(o) for o in obj ]
66 elif isinstance(obj, basestring):
66 elif isinstance(obj, basestring):
67 if ISO8601_PAT.match(obj):
67 if ISO8601_PAT.match(obj):
68 obj = datetime.strptime(obj, ISO8601)
68 obj = datetime.strptime(obj, ISO8601)
69 return obj
69 return obj
70
70
71 def squash_dates(obj):
71 def squash_dates(obj):
72 """squash datetime objects into ISO8601 strings"""
72 """squash datetime objects into ISO8601 strings"""
73 if isinstance(obj, dict):
73 if isinstance(obj, dict):
74 obj = dict(obj) # don't clobber
74 obj = dict(obj) # don't clobber
75 for k,v in obj.iteritems():
75 for k,v in obj.iteritems():
76 obj[k] = squash_dates(v)
76 obj[k] = squash_dates(v)
77 elif isinstance(obj, (list, tuple)):
77 elif isinstance(obj, (list, tuple)):
78 obj = [ squash_dates(o) for o in obj ]
78 obj = [ squash_dates(o) for o in obj ]
79 elif isinstance(obj, datetime):
79 elif isinstance(obj, datetime):
80 obj = obj.strftime(ISO8601)
80 obj = obj.strftime(ISO8601)
81 return obj
81 return obj
82
82
83 def date_default(obj):
83 def date_default(obj):
84 """default function for packing datetime objects in JSON."""
84 """default function for packing datetime objects in JSON."""
85 if isinstance(obj, datetime):
85 if isinstance(obj, datetime):
86 return obj.strftime(ISO8601)
86 return obj.strftime(ISO8601)
87 else:
87 else:
88 raise TypeError("%r is not JSON serializable"%obj)
88 raise TypeError("%r is not JSON serializable"%obj)
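An illustrative round trip with the stdlib json module, tying the helpers above together::

    import json
    from datetime import datetime

    msg = {'sent': datetime(2012, 1, 1, 12, 30, 0, 1)}
    wire = json.dumps(msg, default=date_default)   # datetime -> ISO8601 string
    back = extract_dates(json.loads(wire))         # ISO8601 string -> datetime
    assert back['sent'] == msg['sent']
    # squash_dates(msg) performs the same datetime -> string conversion eagerly
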
89
89
90
90
91
91
92 def json_clean(obj):
92 def json_clean(obj):
93 """Clean an object to ensure it's safe to encode in JSON.
93 """Clean an object to ensure it's safe to encode in JSON.
94
94
95 Atomic, immutable objects are returned unmodified. Sets and tuples are
95 Atomic, immutable objects are returned unmodified. Sets and tuples are
96 converted to lists, lists are copied and dicts are also copied.
96 converted to lists, lists are copied and dicts are also copied.
97
97
98 Note: dicts whose keys could cause collisions upon encoding (such as a dict
98 Note: dicts whose keys could cause collisions upon encoding (such as a dict
99 with both the number 1 and the string '1' as keys) will cause a ValueError
99 with both the number 1 and the string '1' as keys) will cause a ValueError
100 to be raised.
100 to be raised.
101
101
102 Parameters
102 Parameters
103 ----------
103 ----------
104 obj : any python object
104 obj : any python object
105
105
106 Returns
106 Returns
107 -------
107 -------
108 out : object
108 out : object
109
109
110 A version of the input which will not cause an encoding error when
110 A version of the input which will not cause an encoding error when
111 encoded as JSON. Note that this function does not *encode* its inputs,
111 encoded as JSON. Note that this function does not *encode* its inputs,
112 it simply sanitizes them so that there will be no encoding errors later.
112 it simply sanitizes them so that there will be no encoding errors later.
113
113
114 Examples
114 Examples
115 --------
115 --------
116 >>> json_clean(4)
116 >>> json_clean(4)
117 4
117 4
118 >>> json_clean(range(10))
118 >>> json_clean(range(10))
119 [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
119 [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
120 >>> json_clean(dict(x=1, y=2))
120 >>> json_clean(dict(x=1, y=2))
121 {'y': 2, 'x': 1}
121 {'y': 2, 'x': 1}
122 >>> json_clean(dict(x=1, y=2, z=[1,2,3]))
122 >>> json_clean(dict(x=1, y=2, z=[1,2,3]))
123 {'y': 2, 'x': 1, 'z': [1, 2, 3]}
123 {'y': 2, 'x': 1, 'z': [1, 2, 3]}
124 >>> json_clean(True)
124 >>> json_clean(True)
125 True
125 True
126 """
126 """
127 # types that are 'atomic' and ok in json as-is. bool doesn't need to be
127 # types that are 'atomic' and ok in json as-is. bool doesn't need to be
128 # listed explicitly because bools pass as int instances
128 # listed explicitly because bools pass as int instances
129 atomic_ok = (unicode, int, float, types.NoneType)
129 atomic_ok = (unicode, int, float, types.NoneType)
130
130
131 # containers that we need to convert into lists
131 # containers that we need to convert into lists
132 container_to_list = (tuple, set, types.GeneratorType)
132 container_to_list = (tuple, set, types.GeneratorType)
133
133
134 if isinstance(obj, atomic_ok):
134 if isinstance(obj, atomic_ok):
135 return obj
135 return obj
136
136
137 if isinstance(obj, bytes):
137 if isinstance(obj, bytes):
138 return obj.decode(text.getdefaultencoding(), 'replace')
138 return obj.decode(py3compat.getdefaultencoding(), 'replace')
139
139
140 if isinstance(obj, container_to_list) or (
140 if isinstance(obj, container_to_list) or (
141 hasattr(obj, '__iter__') and hasattr(obj, next_attr_name)):
141 hasattr(obj, '__iter__') and hasattr(obj, next_attr_name)):
142 obj = list(obj)
142 obj = list(obj)
143
143
144 if isinstance(obj, list):
144 if isinstance(obj, list):
145 return [json_clean(x) for x in obj]
145 return [json_clean(x) for x in obj]
146
146
147 if isinstance(obj, dict):
147 if isinstance(obj, dict):
148 # First, validate that the dict won't lose data in conversion due to
148 # First, validate that the dict won't lose data in conversion due to
149 # key collisions after stringification. This can happen with keys like
149 # key collisions after stringification. This can happen with keys like
150 # True and 'true' or 1 and '1', which collide in JSON.
150 # True and 'true' or 1 and '1', which collide in JSON.
151 nkeys = len(obj)
151 nkeys = len(obj)
152 nkeys_collapsed = len(set(map(str, obj)))
152 nkeys_collapsed = len(set(map(str, obj)))
153 if nkeys != nkeys_collapsed:
153 if nkeys != nkeys_collapsed:
154 raise ValueError('dict can not be safely converted to JSON: '
154 raise ValueError('dict can not be safely converted to JSON: '
155 'key collision would lead to dropped values')
155 'key collision would lead to dropped values')
156 # If all OK, proceed by making the new dict that will be json-safe
156 # If all OK, proceed by making the new dict that will be json-safe
157 out = {}
157 out = {}
158 for k,v in obj.iteritems():
158 for k,v in obj.iteritems():
159 out[str(k)] = json_clean(v)
159 out[str(k)] = json_clean(v)
160 return out
160 return out
161
161
162 # If we get here, we don't know how to handle the object, so we just get
162 # If we get here, we don't know how to handle the object, so we just get
163 # its repr and return that. This will catch lambdas, open sockets, class
163 # its repr and return that. This will catch lambdas, open sockets, class
164 # objects, and any other complicated contraption that json can't encode
164 # objects, and any other complicated contraption that json can't encode
165 return repr(obj)
165 return repr(obj)
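One more illustrative sketch: json_clean normalizing a couple of common cases (the u'caf\xe9' result assumes the guessed default encoding is UTF-8)::

    json_clean({'point': (1, 2), 'name': b'caf\xc3\xa9'})
    # -> {'point': [1, 2], 'name': u'caf\xe9'}   (tuple listified, bytes decoded)
    json_clean(lambda x: x)    # un-encodable objects fall back to their repr()
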
@@ -1,183 +1,207 b''
1 # coding: utf-8
1 # coding: utf-8
2 """Compatibility tricks for Python 3. Mainly to do with unicode."""
2 """Compatibility tricks for Python 3. Mainly to do with unicode."""
3 import __builtin__
3 import __builtin__
4 import functools
4 import functools
5 import sys
5 import sys
6 import re
6 import re
7 import types
7 import types
8 import locale
8
9
9 orig_open = open
10 orig_open = open
10
11
11 def no_code(x, encoding=None):
12 def no_code(x, encoding=None):
12 return x
13 return x
13
14
14 # to deal with the possibility of sys.std* not being a stream at all
15 # to deal with the possibility of sys.std* not being a stream at all
15 def get_stream_enc(stream, default=None):
16 def get_stream_enc(stream, default=None):
16 if not hasattr(stream, 'encoding') or not stream.encoding:
17 if not hasattr(stream, 'encoding') or not stream.encoding:
17 return default
18 return default
18 else:
19 else:
19 return stream.encoding
20 return stream.encoding
20
21
22 # Less conservative replacement for sys.getdefaultencoding, that will try
23 # to match the environment.
24 # Defined here as central function, so if we find better choices, we
25 # won't need to make changes all over IPython.
26 def getdefaultencoding():
27 """Return IPython's guess for the default encoding for bytes as text.
28
29 Asks for stdin.encoding first, to match the calling Terminal, but that
30 is often None for subprocesses. Falls back on locale.getpreferredencoding(),
31 which should be a sensible platform default (it respects the LANG environment
32 variable), and finally on sys.getdefaultencoding(), which is the most
33 conservative option and is usually ASCII.
34 """
35 enc = get_stream_enc(sys.stdin)
36 if not enc or enc=='ascii':
37 try:
38 # There are reports of getpreferredencoding raising errors
39 # in some cases, which may well be fixed, but let's be conservative here.
40 enc = locale.getpreferredencoding()
41 except Exception:
42 pass
43 return enc or sys.getdefaultencoding()
44
21 def decode(s, encoding=None):
45 def decode(s, encoding=None):
22 encoding = get_stream_enc(sys.stdin, encoding) or sys.getdefaultencoding()
46 encoding = get_stream_enc(sys.stdin, encoding) or getdefaultencoding()
23 return s.decode(encoding, "replace")
47 return s.decode(encoding, "replace")
24
48
25 def encode(u, encoding=None):
49 def encode(u, encoding=None):
26 encoding = get_stream_enc(sys.stdin, encoding) or sys.getdefaultencoding()
50 encoding = get_stream_enc(sys.stdin, encoding) or getdefaultencoding()
27 return u.encode(encoding, "replace")
51 return u.encode(encoding, "replace")
28
52
29
53
30 def cast_unicode(s, encoding=None):
54 def cast_unicode(s, encoding=None):
31 if isinstance(s, bytes):
55 if isinstance(s, bytes):
32 return decode(s, encoding)
56 return decode(s, encoding)
33 return s
57 return s
34
58
35 def cast_bytes(s, encoding=None):
59 def cast_bytes(s, encoding=None):
36 if not isinstance(s, bytes):
60 if not isinstance(s, bytes):
37 return encode(s, encoding)
61 return encode(s, encoding)
38 return s
62 return s
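A brief, illustrative sketch of the encoding helpers now centralized in this module (the reported encoding is platform dependent; 'UTF-8' is just a typical value)::

    enc = getdefaultencoding()    # stdin encoding, else locale, else sys default
    cast_unicode(b'abc')          # bytes   -> u'abc', decoded with that guess
    cast_bytes(u'abc')            # unicode -> 'abc',  encoded with that guess
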
39
63
40 def _modify_str_or_docstring(str_change_func):
64 def _modify_str_or_docstring(str_change_func):
41 @functools.wraps(str_change_func)
65 @functools.wraps(str_change_func)
42 def wrapper(func_or_str):
66 def wrapper(func_or_str):
43 if isinstance(func_or_str, basestring):
67 if isinstance(func_or_str, basestring):
44 func = None
68 func = None
45 doc = func_or_str
69 doc = func_or_str
46 else:
70 else:
47 func = func_or_str
71 func = func_or_str
48 doc = func.__doc__
72 doc = func.__doc__
49
73
50 doc = str_change_func(doc)
74 doc = str_change_func(doc)
51
75
52 if func:
76 if func:
53 func.__doc__ = doc
77 func.__doc__ = doc
54 return func
78 return func
55 return doc
79 return doc
56 return wrapper
80 return wrapper
57
81
58 if sys.version_info[0] >= 3:
82 if sys.version_info[0] >= 3:
59 PY3 = True
83 PY3 = True
60
84
61 input = input
85 input = input
62 builtin_mod_name = "builtins"
86 builtin_mod_name = "builtins"
63
87
64 str_to_unicode = no_code
88 str_to_unicode = no_code
65 unicode_to_str = no_code
89 unicode_to_str = no_code
66 str_to_bytes = encode
90 str_to_bytes = encode
67 bytes_to_str = decode
91 bytes_to_str = decode
68 cast_bytes_py2 = no_code
92 cast_bytes_py2 = no_code
69
93
70 def isidentifier(s, dotted=False):
94 def isidentifier(s, dotted=False):
71 if dotted:
95 if dotted:
72 return all(isidentifier(a) for a in s.split("."))
96 return all(isidentifier(a) for a in s.split("."))
73 return s.isidentifier()
97 return s.isidentifier()
74
98
75 open = orig_open
99 open = orig_open
76
100
77 MethodType = types.MethodType
101 MethodType = types.MethodType
78
102
79 def execfile(fname, glob, loc=None):
103 def execfile(fname, glob, loc=None):
80 loc = loc if (loc is not None) else glob
104 loc = loc if (loc is not None) else glob
81 exec compile(open(fname, 'rb').read(), fname, 'exec') in glob, loc
105 exec compile(open(fname, 'rb').read(), fname, 'exec') in glob, loc
82
106
83 # Refactor print statements in doctests.
107 # Refactor print statements in doctests.
84 _print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE)
108 _print_statement_re = re.compile(r"\bprint (?P<expr>.*)$", re.MULTILINE)
85 def _print_statement_sub(match):
109 def _print_statement_sub(match):
86 expr = match.groups('expr')
110 expr = match.groups('expr')
87 return "print(%s)" % expr
111 return "print(%s)" % expr
88
112
89 @_modify_str_or_docstring
113 @_modify_str_or_docstring
90 def doctest_refactor_print(doc):
114 def doctest_refactor_print(doc):
91 """Refactor 'print x' statements in a doctest to print(x) style. 2to3
115 """Refactor 'print x' statements in a doctest to print(x) style. 2to3
92 unfortunately doesn't pick up on our doctests.
116 unfortunately doesn't pick up on our doctests.
93
117
94 Can accept a string or a function, so it can be used as a decorator."""
118 Can accept a string or a function, so it can be used as a decorator."""
95 return _print_statement_re.sub(_print_statement_sub, doc)
119 return _print_statement_re.sub(_print_statement_sub, doc)
96
120
97 # Abstract u'abc' syntax:
121 # Abstract u'abc' syntax:
98 @_modify_str_or_docstring
122 @_modify_str_or_docstring
99 def u_format(s):
123 def u_format(s):
100 """"{u}'abc'" --> "'abc'" (Python 3)
124 """"{u}'abc'" --> "'abc'" (Python 3)
101
125
102 Accepts a string or a function, so it can be used as a decorator."""
126 Accepts a string or a function, so it can be used as a decorator."""
103 return s.format(u='')
127 return s.format(u='')
104
128
105 else:
129 else:
106 PY3 = False
130 PY3 = False
107
131
108 input = raw_input
132 input = raw_input
109 builtin_mod_name = "__builtin__"
133 builtin_mod_name = "__builtin__"
110
134
111 str_to_unicode = decode
135 str_to_unicode = decode
112 unicode_to_str = encode
136 unicode_to_str = encode
113 str_to_bytes = no_code
137 str_to_bytes = no_code
114 bytes_to_str = no_code
138 bytes_to_str = no_code
115 cast_bytes_py2 = cast_bytes
139 cast_bytes_py2 = cast_bytes
116
140
117 import re
141 import re
118 _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")
142 _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$")
119 def isidentifier(s, dotted=False):
143 def isidentifier(s, dotted=False):
120 if dotted:
144 if dotted:
121 return all(isidentifier(a) for a in s.split("."))
145 return all(isidentifier(a) for a in s.split("."))
122 return bool(_name_re.match(s))
146 return bool(_name_re.match(s))
123
147
124 class open(object):
148 class open(object):
125 """Wrapper providing key part of Python 3 open() interface."""
149 """Wrapper providing key part of Python 3 open() interface."""
126 def __init__(self, fname, mode="r", encoding="utf-8"):
150 def __init__(self, fname, mode="r", encoding="utf-8"):
127 self.f = orig_open(fname, mode)
151 self.f = orig_open(fname, mode)
128 self.enc = encoding
152 self.enc = encoding
129
153
130 def write(self, s):
154 def write(self, s):
131 return self.f.write(s.encode(self.enc))
155 return self.f.write(s.encode(self.enc))
132
156
133 def read(self, size=-1):
157 def read(self, size=-1):
134 return self.f.read(size).decode(self.enc)
158 return self.f.read(size).decode(self.enc)
135
159
136 def close(self):
160 def close(self):
137 return self.f.close()
161 return self.f.close()
138
162
139 def __enter__(self):
163 def __enter__(self):
140 return self
164 return self
141
165
142 def __exit__(self, etype, value, traceback):
166 def __exit__(self, etype, value, traceback):
143 self.f.close()
167 self.f.close()
144
168
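An illustrative use of this Python 2 open() wrapper (file name arbitrary)::

    with open('notes.txt', 'w', encoding='utf-8') as f:
        f.write(u'caf\xe9')           # encoded to UTF-8 on the way out
    print(open('notes.txt').read())   # read() decodes back to unicode
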
145 def MethodType(func, instance):
169 def MethodType(func, instance):
146 return types.MethodType(func, instance, type(instance))
170 return types.MethodType(func, instance, type(instance))
147
171
148 # don't override system execfile on 2.x:
172 # don't override system execfile on 2.x:
149 execfile = execfile
173 execfile = execfile
150
174
151 def doctest_refactor_print(func_or_str):
175 def doctest_refactor_print(func_or_str):
152 return func_or_str
176 return func_or_str
153
177
154
178
155 # Abstract u'abc' syntax:
179 # Abstract u'abc' syntax:
156 @_modify_str_or_docstring
180 @_modify_str_or_docstring
157 def u_format(s):
181 def u_format(s):
158 """"{u}'abc'" --> "u'abc'" (Python 2)
182 """"{u}'abc'" --> "u'abc'" (Python 2)
159
183
160 Accepts a string or a function, so it can be used as a decorator."""
184 Accepts a string or a function, so it can be used as a decorator."""
161 return s.format(u='u')
185 return s.format(u='u')
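An illustrative decorator use of u_format (the function is hypothetical, shown only for its docstring)::

    @u_format
    def greet():
        """Returns {u}'hi'."""
        return u'hi'
    # greet.__doc__ reads "Returns u'hi'." on Python 2 and "Returns 'hi'." on Python 3
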
162
186
163 if sys.platform == 'win32':
187 if sys.platform == 'win32':
164 def execfile(fname, glob=None, loc=None):
188 def execfile(fname, glob=None, loc=None):
165 loc = loc if (loc is not None) else glob
189 loc = loc if (loc is not None) else glob
166 # The rstrip() is necessary b/c trailing whitespace in files will
190 # The rstrip() is necessary b/c trailing whitespace in files will
167 # cause an IndentationError in Python 2.6 (this was fixed in 2.7,
191 # cause an IndentationError in Python 2.6 (this was fixed in 2.7,
168 # but we still support 2.6). See issue 1027.
192 # but we still support 2.6). See issue 1027.
169 scripttext = __builtin__.open(fname).read().rstrip() + '\n'
193 scripttext = __builtin__.open(fname).read().rstrip() + '\n'
170 # compile converts unicode filename to str assuming
194 # compile converts unicode filename to str assuming
171 # ascii. Let's do the conversion before calling compile
195 # ascii. Let's do the conversion before calling compile
172 if isinstance(fname, unicode):
196 if isinstance(fname, unicode):
173 filename = unicode_to_str(fname)
197 filename = unicode_to_str(fname)
174 else:
198 else:
175 filename = fname
199 filename = fname
176 exec compile(scripttext, filename, 'exec') in glob, loc
200 exec compile(scripttext, filename, 'exec') in glob, loc
177 else:
201 else:
178 def execfile(fname, *where):
202 def execfile(fname, *where):
179 if isinstance(fname, unicode):
203 if isinstance(fname, unicode):
180 filename = fname.encode(sys.getfilesystemencoding())
204 filename = fname.encode(sys.getfilesystemencoding())
181 else:
205 else:
182 filename = fname
206 filename = fname
183 __builtin__.execfile(filename, *where)
207 __builtin__.execfile(filename, *where)
@@ -1,760 +1,736 b''
1 # encoding: utf-8
1 # encoding: utf-8
2 """
2 """
3 Utilities for working with strings and text.
3 Utilities for working with strings and text.
4 """
4 """
5
5
6 #-----------------------------------------------------------------------------
6 #-----------------------------------------------------------------------------
7 # Copyright (C) 2008-2011 The IPython Development Team
7 # Copyright (C) 2008-2011 The IPython Development Team
8 #
8 #
9 # Distributed under the terms of the BSD License. The full license is in
9 # Distributed under the terms of the BSD License. The full license is in
10 # the file COPYING, distributed as part of this software.
10 # the file COPYING, distributed as part of this software.
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Imports
14 # Imports
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16
16
17 import __main__
17 import __main__
18
18
19 import locale
20 import os
19 import os
21 import re
20 import re
22 import shutil
21 import shutil
23 import sys
22 import sys
24 import textwrap
23 import textwrap
25 from string import Formatter
24 from string import Formatter
26
25
27 from IPython.external.path import path
26 from IPython.external.path import path
28 from IPython.testing.skipdoctest import skip_doctest_py3
27 from IPython.testing.skipdoctest import skip_doctest_py3
29 from IPython.utils import py3compat
28 from IPython.utils import py3compat
30 from IPython.utils.io import nlprint
29 from IPython.utils.io import nlprint
31 from IPython.utils.data import flatten
30 from IPython.utils.data import flatten
32
31
33 #-----------------------------------------------------------------------------
32 #-----------------------------------------------------------------------------
34 # Code
33 # Code
35 #-----------------------------------------------------------------------------
34 #-----------------------------------------------------------------------------
36
35
37 # Less conservative replacement for sys.getdefaultencoding, that will try
38 # to match the environment.
39 # Defined here as central function, so if we find better choices, we
40 # won't need to make changes all over IPython.
41 def getdefaultencoding():
42 """Return IPython's guess for the default encoding for bytes as text.
43
44 Asks for stdin.encoding first, to match the calling Terminal, but that
45 is often None for subprocesses. Fall back on locale.getpreferredencoding()
46 which should be a sensible platform default (that respects LANG environment),
47 and finally to sys.getdefaultencoding() which is the most conservative option,
48 and usually ASCII.
49 """
50 enc = py3compat.get_stream_enc(sys.stdin)
51 if not enc or enc=='ascii':
52 try:
53 # There are reports of getpreferredencoding raising errors
54 # in some cases, which may well be fixed, but let's be conservative here.
55 enc = locale.getpreferredencoding()
56 except Exception:
57 pass
58 return enc or sys.getdefaultencoding()
59
60 def unquote_ends(istr):
36 def unquote_ends(istr):
61 """Remove a single pair of quotes from the endpoints of a string."""
37 """Remove a single pair of quotes from the endpoints of a string."""
62
38
63 if not istr:
39 if not istr:
64 return istr
40 return istr
65 if (istr[0]=="'" and istr[-1]=="'") or \
41 if (istr[0]=="'" and istr[-1]=="'") or \
66 (istr[0]=='"' and istr[-1]=='"'):
42 (istr[0]=='"' and istr[-1]=='"'):
67 return istr[1:-1]
43 return istr[1:-1]
68 else:
44 else:
69 return istr
45 return istr
70
46
71
47
72 class LSString(str):
48 class LSString(str):
73 """String derivative with special access attributes.
49 """String derivative with special access attributes.
74
50
75 These are normal strings, but with the special attributes:
51 These are normal strings, but with the special attributes:
76
52
77 .l (or .list) : value as list (split on newlines).
53 .l (or .list) : value as list (split on newlines).
78 .n (or .nlstr): original value (the string itself).
54 .n (or .nlstr): original value (the string itself).
79 .s (or .spstr): value as whitespace-separated string.
55 .s (or .spstr): value as whitespace-separated string.
80 .p (or .paths): list of path objects
56 .p (or .paths): list of path objects
81
57
82 Any values which require transformations are computed only once and
58 Any values which require transformations are computed only once and
83 cached.
59 cached.
84
60
85 Such strings are very useful to efficiently interact with the shell, which
61 Such strings are very useful to efficiently interact with the shell, which
86 typically only understands whitespace-separated options for commands."""
62 typically only understands whitespace-separated options for commands."""
87
63
88 def get_list(self):
64 def get_list(self):
89 try:
65 try:
90 return self.__list
66 return self.__list
91 except AttributeError:
67 except AttributeError:
92 self.__list = self.split('\n')
68 self.__list = self.split('\n')
93 return self.__list
69 return self.__list
94
70
95 l = list = property(get_list)
71 l = list = property(get_list)
96
72
97 def get_spstr(self):
73 def get_spstr(self):
98 try:
74 try:
99 return self.__spstr
75 return self.__spstr
100 except AttributeError:
76 except AttributeError:
101 self.__spstr = self.replace('\n',' ')
77 self.__spstr = self.replace('\n',' ')
102 return self.__spstr
78 return self.__spstr
103
79
104 s = spstr = property(get_spstr)
80 s = spstr = property(get_spstr)
105
81
106 def get_nlstr(self):
82 def get_nlstr(self):
107 return self
83 return self
108
84
109 n = nlstr = property(get_nlstr)
85 n = nlstr = property(get_nlstr)
110
86
111 def get_paths(self):
87 def get_paths(self):
112 try:
88 try:
113 return self.__paths
89 return self.__paths
114 except AttributeError:
90 except AttributeError:
115 self.__paths = [path(p) for p in self.split('\n') if os.path.exists(p)]
91 self.__paths = [path(p) for p in self.split('\n') if os.path.exists(p)]
116 return self.__paths
92 return self.__paths
117
93
118 p = paths = property(get_paths)
94 p = paths = property(get_paths)
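An illustrative sketch of the LSString convenience attributes (the file names are arbitrary and assumed not to exist, so .p comes back empty)::

    out = LSString('foo.txt\nbar.txt')
    out.l    # ['foo.txt', 'bar.txt']   -- split on newlines
    out.s    # 'foo.txt bar.txt'        -- newlines replaced by spaces
    out.p    # []                       -- only paths that exist on disk
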
119
95
120 # FIXME: We need to reimplement type specific displayhook and then add this
96 # FIXME: We need to reimplement type specific displayhook and then add this
121 # back as a custom printer. This should also be moved outside utils into the
97 # back as a custom printer. This should also be moved outside utils into the
122 # core.
98 # core.
123
99
124 # def print_lsstring(arg):
100 # def print_lsstring(arg):
125 # """ Prettier (non-repr-like) and more informative printer for LSString """
101 # """ Prettier (non-repr-like) and more informative printer for LSString """
126 # print "LSString (.p, .n, .l, .s available). Value:"
102 # print "LSString (.p, .n, .l, .s available). Value:"
127 # print arg
103 # print arg
128 #
104 #
129 #
105 #
130 # print_lsstring = result_display.when_type(LSString)(print_lsstring)
106 # print_lsstring = result_display.when_type(LSString)(print_lsstring)
131
107
132
108
133 class SList(list):
109 class SList(list):
134 """List derivative with special access attributes.
110 """List derivative with special access attributes.
135
111
136 These are normal lists, but with the special attributes:
112 These are normal lists, but with the special attributes:
137
113
138 .l (or .list) : value as list (the list itself).
114 .l (or .list) : value as list (the list itself).
139 .n (or .nlstr): value as a string, joined on newlines.
115 .n (or .nlstr): value as a string, joined on newlines.
140 .s (or .spstr): value as a string, joined on spaces.
116 .s (or .spstr): value as a string, joined on spaces.
141 .p (or .paths): list of path objects
117 .p (or .paths): list of path objects
142
118
143 Any values which require transformations are computed only once and
119 Any values which require transformations are computed only once and
144 cached."""
120 cached."""
145
121
146 def get_list(self):
122 def get_list(self):
147 return self
123 return self
148
124
149 l = list = property(get_list)
125 l = list = property(get_list)
150
126
151 def get_spstr(self):
127 def get_spstr(self):
152 try:
128 try:
153 return self.__spstr
129 return self.__spstr
154 except AttributeError:
130 except AttributeError:
155 self.__spstr = ' '.join(self)
131 self.__spstr = ' '.join(self)
156 return self.__spstr
132 return self.__spstr
157
133
158 s = spstr = property(get_spstr)
134 s = spstr = property(get_spstr)
159
135
160 def get_nlstr(self):
136 def get_nlstr(self):
161 try:
137 try:
162 return self.__nlstr
138 return self.__nlstr
163 except AttributeError:
139 except AttributeError:
164 self.__nlstr = '\n'.join(self)
140 self.__nlstr = '\n'.join(self)
165 return self.__nlstr
141 return self.__nlstr
166
142
167 n = nlstr = property(get_nlstr)
143 n = nlstr = property(get_nlstr)
168
144
169 def get_paths(self):
145 def get_paths(self):
170 try:
146 try:
171 return self.__paths
147 return self.__paths
172 except AttributeError:
148 except AttributeError:
173 self.__paths = [path(p) for p in self if os.path.exists(p)]
149 self.__paths = [path(p) for p in self if os.path.exists(p)]
174 return self.__paths
150 return self.__paths
175
151
176 p = paths = property(get_paths)
152 p = paths = property(get_paths)
177
153
178 def grep(self, pattern, prune = False, field = None):
154 def grep(self, pattern, prune = False, field = None):
179 """ Return all strings matching 'pattern' (a regex or callable)
155 """ Return all strings matching 'pattern' (a regex or callable)
180
156
181 This is case-insensitive. If prune is true, return all items
157 This is case-insensitive. If prune is true, return all items
182 NOT matching the pattern.
158 NOT matching the pattern.
183
159
184 If field is specified, the match must occur in the specified
160 If field is specified, the match must occur in the specified
185 whitespace-separated field.
161 whitespace-separated field.
186
162
187 Examples::
163 Examples::
188
164
189 a.grep( lambda x: x.startswith('C') )
165 a.grep( lambda x: x.startswith('C') )
190 a.grep('Cha.*log', prune=1)
166 a.grep('Cha.*log', prune=1)
191 a.grep('chm', field=-1)
167 a.grep('chm', field=-1)
192 """
168 """
193
169
194 def match_target(s):
170 def match_target(s):
195 if field is None:
171 if field is None:
196 return s
172 return s
197 parts = s.split()
173 parts = s.split()
198 try:
174 try:
199 tgt = parts[field]
175 tgt = parts[field]
200 return tgt
176 return tgt
201 except IndexError:
177 except IndexError:
202 return ""
178 return ""
203
179
204 if isinstance(pattern, basestring):
180 if isinstance(pattern, basestring):
205 pred = lambda x : re.search(pattern, x, re.IGNORECASE)
181 pred = lambda x : re.search(pattern, x, re.IGNORECASE)
206 else:
182 else:
207 pred = pattern
183 pred = pattern
208 if not prune:
184 if not prune:
209 return SList([el for el in self if pred(match_target(el))])
185 return SList([el for el in self if pred(match_target(el))])
210 else:
186 else:
211 return SList([el for el in self if not pred(match_target(el))])
187 return SList([el for el in self if not pred(match_target(el))])
212
188
213 def fields(self, *fields):
189 def fields(self, *fields):
214 """ Collect whitespace-separated fields from string list
190 """ Collect whitespace-separated fields from string list
215
191
216 Allows quick awk-like usage of string lists.
192 Allows quick awk-like usage of string lists.
217
193
218 Example data (in var a, created by 'a = !ls -l')::
194 Example data (in var a, created by 'a = !ls -l')::
219 -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
195 -rwxrwxrwx 1 ville None 18 Dec 14 2006 ChangeLog
220 drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
196 drwxrwxrwx+ 6 ville None 0 Oct 24 18:05 IPython
221
197
222 a.fields(0) is ['-rwxrwxrwx', 'drwxrwxrwx+']
198 a.fields(0) is ['-rwxrwxrwx', 'drwxrwxrwx+']
223 a.fields(1,0) is ['1 -rwxrwxrwx', '6 drwxrwxrwx+']
199 a.fields(1,0) is ['1 -rwxrwxrwx', '6 drwxrwxrwx+']
224 (note the joining by space).
200 (note the joining by space).
225 a.fields(-1) is ['ChangeLog', 'IPython']
201 a.fields(-1) is ['ChangeLog', 'IPython']
226
202
227 IndexErrors are ignored.
203 IndexErrors are ignored.
228
204
229 Without args, fields() just split()'s the strings.
205 Without args, fields() just split()'s the strings.
230 """
206 """
231 if len(fields) == 0:
207 if len(fields) == 0:
232 return [el.split() for el in self]
208 return [el.split() for el in self]
233
209
234 res = SList()
210 res = SList()
235 for el in [f.split() for f in self]:
211 for el in [f.split() for f in self]:
236 lineparts = []
212 lineparts = []
237
213
238 for fd in fields:
214 for fd in fields:
239 try:
215 try:
240 lineparts.append(el[fd])
216 lineparts.append(el[fd])
241 except IndexError:
217 except IndexError:
242 pass
218 pass
243 if lineparts:
219 if lineparts:
244 res.append(" ".join(lineparts))
220 res.append(" ".join(lineparts))
245
221
246 return res
222 return res
247
223
248 def sort(self,field= None, nums = False):
224 def sort(self,field= None, nums = False):
249 """ sort by specified fields (see fields())
225 """ sort by specified fields (see fields())
250
226
251 Example::
227 Example::
252 a.sort(1, nums = True)
228 a.sort(1, nums = True)
253
229
254 Sorts a by second field, in numerical order (so that 21 > 3)
230 Sorts a by second field, in numerical order (so that 21 > 3)
255
231
256 """
232 """
257
233
258 #decorate, sort, undecorate
234 #decorate, sort, undecorate
259 if field is not None:
235 if field is not None:
260 dsu = [[SList([line]).fields(field), line] for line in self]
236 dsu = [[SList([line]).fields(field), line] for line in self]
261 else:
237 else:
262 dsu = [[line, line] for line in self]
238 dsu = [[line, line] for line in self]
263 if nums:
239 if nums:
264 for i in range(len(dsu)):
240 for i in range(len(dsu)):
265 numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
241 numstr = "".join([ch for ch in dsu[i][0] if ch.isdigit()])
266 try:
242 try:
267 n = int(numstr)
243 n = int(numstr)
268 except ValueError:
244 except ValueError:
269 n = 0;
245 n = 0;
270 dsu[i][0] = n
246 dsu[i][0] = n
271
247
272
248
273 dsu.sort()
249 dsu.sort()
274 return SList([t[1] for t in dsu])
250 return SList([t[1] for t in dsu])
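An illustrative sketch of SList on captured 'ls -l'-style lines (the sample lines are arbitrary; field indices follow the docstrings above)::

    lines = SList(['-rw-r--r-- 1 u g   18 Dec 14  2006 ChangeLog',
                   'drwxr-xr-x 6 u g 4096 Oct 24 18:05 IPython'])
    lines.grep('ipython')     # case-insensitive -> just the IPython line
    lines.fields(-1)          # ['ChangeLog', 'IPython']
    lines.sort(4, nums=True)  # numeric sort on the size column
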
275
251
276
252
277 # FIXME: We need to reimplement type specific displayhook and then add this
253 # FIXME: We need to reimplement type specific displayhook and then add this
278 # back as a custom printer. This should also be moved outside utils into the
254 # back as a custom printer. This should also be moved outside utils into the
279 # core.
255 # core.
280
256
281 # def print_slist(arg):
257 # def print_slist(arg):
282 # """ Prettier (non-repr-like) and more informative printer for SList """
258 # """ Prettier (non-repr-like) and more informative printer for SList """
283 # print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
259 # print "SList (.p, .n, .l, .s, .grep(), .fields(), sort() available):"
284 # if hasattr(arg, 'hideonce') and arg.hideonce:
260 # if hasattr(arg, 'hideonce') and arg.hideonce:
285 # arg.hideonce = False
261 # arg.hideonce = False
286 # return
262 # return
287 #
263 #
288 # nlprint(arg)
264 # nlprint(arg)
289 #
265 #
290 # print_slist = result_display.when_type(SList)(print_slist)
266 # print_slist = result_display.when_type(SList)(print_slist)
291
267
292
268
293 def esc_quotes(strng):
269 def esc_quotes(strng):
294 """Return the input string with single and double quotes escaped out"""
270 """Return the input string with single and double quotes escaped out"""
295
271
296 return strng.replace('"','\\"').replace("'","\\'")
272 return strng.replace('"','\\"').replace("'","\\'")
297
273
298
274
299 def qw(words,flat=0,sep=None,maxsplit=-1):
275 def qw(words,flat=0,sep=None,maxsplit=-1):
300 """Similar to Perl's qw() operator, but with some more options.
276 """Similar to Perl's qw() operator, but with some more options.
301
277
302 qw(words,flat=0,sep=' ',maxsplit=-1) -> words.split(sep,maxsplit)
278 qw(words,flat=0,sep=' ',maxsplit=-1) -> words.split(sep,maxsplit)
303
279
304 words can also be a list itself, and with flat=1, the output will be
280 words can also be a list itself, and with flat=1, the output will be
305 recursively flattened.
281 recursively flattened.
306
282
307 Examples:
283 Examples:
308
284
309 >>> qw('1 2')
285 >>> qw('1 2')
310 ['1', '2']
286 ['1', '2']
311
287
312 >>> qw(['a b','1 2',['m n','p q']])
288 >>> qw(['a b','1 2',['m n','p q']])
313 [['a', 'b'], ['1', '2'], [['m', 'n'], ['p', 'q']]]
289 [['a', 'b'], ['1', '2'], [['m', 'n'], ['p', 'q']]]
314
290
315 >>> qw(['a b','1 2',['m n','p q']],flat=1)
291 >>> qw(['a b','1 2',['m n','p q']],flat=1)
316 ['a', 'b', '1', '2', 'm', 'n', 'p', 'q']
292 ['a', 'b', '1', '2', 'm', 'n', 'p', 'q']
317 """
293 """
318
294
319 if isinstance(words, basestring):
295 if isinstance(words, basestring):
320 return [word.strip() for word in words.split(sep,maxsplit)
296 return [word.strip() for word in words.split(sep,maxsplit)
321 if word and not word.isspace() ]
297 if word and not word.isspace() ]
322 if flat:
298 if flat:
323 return flatten(map(qw,words,[1]*len(words)))
299 return flatten(map(qw,words,[1]*len(words)))
324 return map(qw,words)
300 return map(qw,words)
325
301
326
302
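The doctests above cover the default whitespace split and flat mode; a small hedged sketch of the remaining sep/maxsplit options:

    from IPython.utils.text import qw

    print(qw('a:b:c', sep=':'))        # ['a', 'b', 'c']
    print(qw('a b c d', maxsplit=1))   # ['a', 'b c d']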
327 def qwflat(words,sep=None,maxsplit=-1):
303 def qwflat(words,sep=None,maxsplit=-1):
328 """Calls qw(words) in flat mode. It's just a convenient shorthand."""
304 """Calls qw(words) in flat mode. It's just a convenient shorthand."""
329 return qw(words,1,sep,maxsplit)
305 return qw(words,1,sep,maxsplit)
330
306
331
307
332 def qw_lol(indata):
308 def qw_lol(indata):
333 """qw_lol('a b') -> [['a','b']],
309 """qw_lol('a b') -> [['a','b']],
334 otherwise it's just a call to qw().
310 otherwise it's just a call to qw().
335
311
336 We need this to make sure the modules_some keys *always* end up as a
312 We need this to make sure the modules_some keys *always* end up as a
337 list of lists."""
313 list of lists."""
338
314
339 if isinstance(indata, basestring):
315 if isinstance(indata, basestring):
340 return [qw(indata)]
316 return [qw(indata)]
341 else:
317 else:
342 return qw(indata)
318 return qw(indata)
343
319
344
320
345 def grep(pat,list,case=1):
321 def grep(pat,list,case=1):
346 """Simple minded grep-like function.
322 """Simple minded grep-like function.
347 grep(pat,list) returns occurrences of pat in list, None on failure.
323 grep(pat,list) returns occurrences of pat in list, None on failure.
348
324
349 It only does simple string matching, with no support for regexps. Use the
325 It only does simple string matching, with no support for regexps. Use the
350 option case=0 for case-insensitive matching."""
326 option case=0 for case-insensitive matching."""
351
327
352 # This is pretty crude. At least it should implement copying only references
328 # This is pretty crude. At least it should implement copying only references
353 # to the original data in case it's big. Now it copies the data for output.
329 # to the original data in case it's big. Now it copies the data for output.
354 out=[]
330 out=[]
355 if case:
331 if case:
356 for term in list:
332 for term in list:
357 if term.find(pat)>-1: out.append(term)
333 if term.find(pat)>-1: out.append(term)
358 else:
334 else:
359 lpat=pat.lower()
335 lpat=pat.lower()
360 for term in list:
336 for term in list:
361 if term.lower().find(lpat)>-1: out.append(term)
337 if term.lower().find(lpat)>-1: out.append(term)
362
338
363 if len(out): return out
339 if len(out): return out
364 else: return None
340 else: return None
365
341
366
342
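A sketch of the two points worth remembering about grep(): matching is plain substring search (no regexps), and a miss returns None rather than an empty list:

    from IPython.utils.text import grep

    lines = ['Error: disk full', 'warning: low memory', 'OK']

    print(grep('error', lines))           # None -- matching is case-sensitive by default
    print(grep('error', lines, case=0))   # ['Error: disk full']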
367 def dgrep(pat,*opts):
343 def dgrep(pat,*opts):
368 """Return grep() on dir()+dir(__builtins__).
344 """Return grep() on dir()+dir(__builtins__).
369
345
370 A very common use of grep() when working interactively."""
346 A very common use of grep() when working interactively."""
371
347
372 return grep(pat,dir(__main__)+dir(__main__.__builtins__),*opts)
348 return grep(pat,dir(__main__)+dir(__main__.__builtins__),*opts)
373
349
374
350
375 def idgrep(pat):
351 def idgrep(pat):
376 """Case-insensitive dgrep()"""
352 """Case-insensitive dgrep()"""
377
353
378 return dgrep(pat,0)
354 return dgrep(pat,0)
379
355
380
356
381 def igrep(pat,list):
357 def igrep(pat,list):
382 """Synonym for case-insensitive grep."""
358 """Synonym for case-insensitive grep."""
383
359
384 return grep(pat,list,case=0)
360 return grep(pat,list,case=0)
385
361
386
362
387 def indent(instr,nspaces=4, ntabs=0, flatten=False):
363 def indent(instr,nspaces=4, ntabs=0, flatten=False):
388 """Indent a string a given number of spaces or tabstops.
364 """Indent a string a given number of spaces or tabstops.
389
365
390 indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
366 indent(str,nspaces=4,ntabs=0) -> indent str by ntabs+nspaces.
391
367
392 Parameters
368 Parameters
393 ----------
369 ----------
394
370
395 instr : basestring
371 instr : basestring
396 The string to be indented.
372 The string to be indented.
397 nspaces : int (default: 4)
373 nspaces : int (default: 4)
398 The number of spaces to be indented.
374 The number of spaces to be indented.
399 ntabs : int (default: 0)
375 ntabs : int (default: 0)
400 The number of tabs to be indented.
376 The number of tabs to be indented.
401 flatten : bool (default: False)
377 flatten : bool (default: False)
402 Whether to scrub existing indentation. If True, all lines will be
378 Whether to scrub existing indentation. If True, all lines will be
403 aligned to the same indentation. If False, existing indentation will
379 aligned to the same indentation. If False, existing indentation will
404 be strictly increased.
380 be strictly increased.
405
381
406 Returns
382 Returns
407 -------
383 -------
408
384
409 str|unicode : string indented by ntabs and nspaces.
385 str|unicode : string indented by ntabs and nspaces.
410
386
411 """
387 """
412 if instr is None:
388 if instr is None:
413 return
389 return
414 ind = '\t'*ntabs+' '*nspaces
390 ind = '\t'*ntabs+' '*nspaces
415 if flatten:
391 if flatten:
416 pat = re.compile(r'^\s*', re.MULTILINE)
392 pat = re.compile(r'^\s*', re.MULTILINE)
417 else:
393 else:
418 pat = re.compile(r'^', re.MULTILINE)
394 pat = re.compile(r'^', re.MULTILINE)
419 outstr = re.sub(pat, ind, instr)
395 outstr = re.sub(pat, ind, instr)
420 if outstr.endswith(os.linesep+ind):
396 if outstr.endswith(os.linesep+ind):
421 return outstr[:-len(ind)]
397 return outstr[:-len(ind)]
422 else:
398 else:
423 return outstr
399 return outstr
424
400
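A sketch of the flatten option; the indentation behaviour is the point, assuming the module imports as IPython.utils.text:

    from IPython.utils.text import indent

    src = "def f():\n        return 1\n"

    print(indent(src, nspaces=2))                # every line gains two extra spaces
    print(indent(src, nspaces=2, flatten=True))  # every line starts with exactly two spaces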
425 def native_line_ends(filename,backup=1):
401 def native_line_ends(filename,backup=1):
426 """Convert (in-place) a file to line-ends native to the current OS.
402 """Convert (in-place) a file to line-ends native to the current OS.
427
403
428 If the optional backup argument is given as false, no backup of the
404 If the optional backup argument is given as false, no backup of the
429 original file is left. """
405 original file is left. """
430
406
431 backup_suffixes = {'posix':'~','dos':'.bak','nt':'.bak','mac':'.bak'}
407 backup_suffixes = {'posix':'~','dos':'.bak','nt':'.bak','mac':'.bak'}
432
408
433 bak_filename = filename + backup_suffixes[os.name]
409 bak_filename = filename + backup_suffixes[os.name]
434
410
435 original = open(filename).read()
411 original = open(filename).read()
436 shutil.copy2(filename,bak_filename)
412 shutil.copy2(filename,bak_filename)
437 try:
413 try:
438 new = open(filename,'wb')
414 new = open(filename,'wb')
439 new.write(os.linesep.join(original.splitlines()))
415 new.write(os.linesep.join(original.splitlines()))
440 new.write(os.linesep) # ALWAYS put an eol at the end of the file
416 new.write(os.linesep) # ALWAYS put an eol at the end of the file
441 new.close()
417 new.close()
442 except:
418 except:
443 os.rename(bak_filename,filename)
419 os.rename(bak_filename,filename)
444 if not backup:
420 if not backup:
445 try:
421 try:
446 os.remove(bak_filename)
422 os.remove(bak_filename)
447 except:
423 except:
448 pass
424 pass
449
425
450
426
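A usage sketch (the file name is illustrative, and shutil/os are assumed to be imported at the top of this module, outside the hunk shown):

    from IPython.utils.text import native_line_ends

    with open('notes.txt', 'wb') as f:
        f.write('one\r\ntwo\r\n')            # DOS line endings

    native_line_ends('notes.txt', backup=0)  # rewritten in place with os.linesep; no backup copy kept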
451 def list_strings(arg):
427 def list_strings(arg):
452 """Always return a list of strings, given a string or list of strings
428 """Always return a list of strings, given a string or list of strings
453 as input.
429 as input.
454
430
455 :Examples:
431 :Examples:
456
432
457 In [7]: list_strings('A single string')
433 In [7]: list_strings('A single string')
458 Out[7]: ['A single string']
434 Out[7]: ['A single string']
459
435
460 In [8]: list_strings(['A single string in a list'])
436 In [8]: list_strings(['A single string in a list'])
461 Out[8]: ['A single string in a list']
437 Out[8]: ['A single string in a list']
462
438
463 In [9]: list_strings(['A','list','of','strings'])
439 In [9]: list_strings(['A','list','of','strings'])
464 Out[9]: ['A', 'list', 'of', 'strings']
440 Out[9]: ['A', 'list', 'of', 'strings']
465 """
441 """
466
442
467 if isinstance(arg,basestring): return [arg]
443 if isinstance(arg,basestring): return [arg]
468 else: return arg
444 else: return arg
469
445
470
446
471 def marquee(txt='',width=78,mark='*'):
447 def marquee(txt='',width=78,mark='*'):
472 """Return the input string centered in a 'marquee'.
448 """Return the input string centered in a 'marquee'.
473
449
474 :Examples:
450 :Examples:
475
451
476 In [16]: marquee('A test',40)
452 In [16]: marquee('A test',40)
477 Out[16]: '**************** A test ****************'
453 Out[16]: '**************** A test ****************'
478
454
479 In [17]: marquee('A test',40,'-')
455 In [17]: marquee('A test',40,'-')
480 Out[17]: '---------------- A test ----------------'
456 Out[17]: '---------------- A test ----------------'
481
457
482 In [18]: marquee('A test',40,' ')
458 In [18]: marquee('A test',40,' ')
483 Out[18]: ' A test '
459 Out[18]: ' A test '
484
460
485 """
461 """
486 if not txt:
462 if not txt:
487 return (mark*width)[:width]
463 return (mark*width)[:width]
488 nmark = (width-len(txt)-2)//len(mark)//2
464 nmark = (width-len(txt)-2)//len(mark)//2
489 if nmark < 0: nmark =0
465 if nmark < 0: nmark =0
490 marks = mark*nmark
466 marks = mark*nmark
491 return '%s %s %s' % (marks,txt,marks)
467 return '%s %s %s' % (marks,txt,marks)
492
468
493
469
494 ini_spaces_re = re.compile(r'^(\s+)')
470 ini_spaces_re = re.compile(r'^(\s+)')
495
471
496 def num_ini_spaces(strng):
472 def num_ini_spaces(strng):
497 """Return the number of initial spaces in a string"""
473 """Return the number of initial spaces in a string"""
498
474
499 ini_spaces = ini_spaces_re.match(strng)
475 ini_spaces = ini_spaces_re.match(strng)
500 if ini_spaces:
476 if ini_spaces:
501 return ini_spaces.end()
477 return ini_spaces.end()
502 else:
478 else:
503 return 0
479 return 0
504
480
505
481
506 def format_screen(strng):
482 def format_screen(strng):
507 """Format a string for screen printing.
483 """Format a string for screen printing.
508
484
509 This removes some latex-type format codes."""
485 This removes some latex-type format codes."""
510 # Paragraph continue
486 # Paragraph continue
511 par_re = re.compile(r'\\$',re.MULTILINE)
487 par_re = re.compile(r'\\$',re.MULTILINE)
512 strng = par_re.sub('',strng)
488 strng = par_re.sub('',strng)
513 return strng
489 return strng
514
490
515 def dedent(text):
491 def dedent(text):
516 """Equivalent of textwrap.dedent that ignores unindented first line.
492 """Equivalent of textwrap.dedent that ignores unindented first line.
517
493
518 This means it will still dedent strings like:
494 This means it will still dedent strings like:
519 '''foo
495 '''foo
520 is a bar
496 is a bar
521 '''
497 '''
522
498
523 For use in wrap_paragraphs.
499 For use in wrap_paragraphs.
524 """
500 """
525
501
526 if text.startswith('\n'):
502 if text.startswith('\n'):
527 # text starts with blank line, don't ignore the first line
503 # text starts with blank line, don't ignore the first line
528 return textwrap.dedent(text)
504 return textwrap.dedent(text)
529
505
530 # split first line
506 # split first line
531 splits = text.split('\n',1)
507 splits = text.split('\n',1)
532 if len(splits) == 1:
508 if len(splits) == 1:
533 # only one line
509 # only one line
534 return textwrap.dedent(text)
510 return textwrap.dedent(text)
535
511
536 first, rest = splits
512 first, rest = splits
537 # dedent everything but the first line
513 # dedent everything but the first line
538 rest = textwrap.dedent(rest)
514 rest = textwrap.dedent(rest)
539 return '\n'.join([first, rest])
515 return '\n'.join([first, rest])
540
516
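A sketch of why the first line is special-cased: plain textwrap.dedent sees a flush first line, computes an empty common margin, and leaves the body indented:

    import textwrap
    from IPython.utils.text import dedent

    doc = "foo\n    is a bar\n"

    print(textwrap.dedent(doc))   # body stays indented
    print(dedent(doc))            # 'foo\nis a bar\n'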
541 def wrap_paragraphs(text, ncols=80):
517 def wrap_paragraphs(text, ncols=80):
542 """Wrap multiple paragraphs to fit a specified width.
518 """Wrap multiple paragraphs to fit a specified width.
543
519
544 This is equivalent to textwrap.wrap, but with support for multiple
520 This is equivalent to textwrap.wrap, but with support for multiple
545 paragraphs, as separated by empty lines.
521 paragraphs, as separated by empty lines.
546
522
547 Returns
523 Returns
548 -------
524 -------
549
525
550 list of complete paragraphs, wrapped to fill `ncols` columns.
526 list of complete paragraphs, wrapped to fill `ncols` columns.
551 """
527 """
552 paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE)
528 paragraph_re = re.compile(r'\n(\s*\n)+', re.MULTILINE)
553 text = dedent(text).strip()
529 text = dedent(text).strip()
554 paragraphs = paragraph_re.split(text)[::2] # every other entry is space
530 paragraphs = paragraph_re.split(text)[::2] # every other entry is space
555 out_ps = []
531 out_ps = []
556 indent_re = re.compile(r'\n\s+', re.MULTILINE)
532 indent_re = re.compile(r'\n\s+', re.MULTILINE)
557 for p in paragraphs:
533 for p in paragraphs:
558 # presume indentation that survives dedent is meaningful formatting,
534 # presume indentation that survives dedent is meaningful formatting,
559 # so don't fill unless text is flush.
535 # so don't fill unless text is flush.
560 if indent_re.search(p) is None:
536 if indent_re.search(p) is None:
561 # wrap paragraph
537 # wrap paragraph
562 p = textwrap.fill(p, ncols)
538 p = textwrap.fill(p, ncols)
563 out_ps.append(p)
539 out_ps.append(p)
564 return out_ps
540 return out_ps
565
541
566
542
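A sketch of the two cases handled above: a flush paragraph is re-filled to ncols, while a paragraph whose continuation lines are indented (and so survive dedent) is presumed preformatted and returned untouched:

    from IPython.utils.text import wrap_paragraphs

    text = ("This first paragraph is one long line that should be re-flowed "
            "to the requested width of forty columns.\n"
            "\n"
            "Second paragraph:\n"
            "    these indented lines survive dedent,\n"
            "    so the block is not re-wrapped.\n")

    for p in wrap_paragraphs(text, ncols=40):
        print(p)
        print('----')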
567 class EvalFormatter(Formatter):
543 class EvalFormatter(Formatter):
568 """A String Formatter that allows evaluation of simple expressions.
544 """A String Formatter that allows evaluation of simple expressions.
569
545
570 Note that this version interprets a : as specifying a format string (as per
546 Note that this version interprets a : as specifying a format string (as per
571 standard string formatting), so if slicing is required, you must explicitly
547 standard string formatting), so if slicing is required, you must explicitly
572 create a slice.
548 create a slice.
573
549
574 This is to be used in templating cases, such as the parallel batch
550 This is to be used in templating cases, such as the parallel batch
575 script templates, where simple arithmetic on arguments is useful.
551 script templates, where simple arithmetic on arguments is useful.
576
552
577 Examples
553 Examples
578 --------
554 --------
579
555
580 In [1]: f = EvalFormatter()
556 In [1]: f = EvalFormatter()
581 In [2]: f.format('{n//4}', n=8)
557 In [2]: f.format('{n//4}', n=8)
582 Out [2]: '2'
558 Out [2]: '2'
583
559
584 In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello")
560 In [3]: f.format("{greeting[slice(2,4)]}", greeting="Hello")
585 Out [3]: 'll'
561 Out [3]: 'll'
586 """
562 """
587 def get_field(self, name, args, kwargs):
563 def get_field(self, name, args, kwargs):
588 v = eval(name, kwargs)
564 v = eval(name, kwargs)
589 return v, name
565 return v, name
590
566
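A sketch of the ':' behaviour the docstring describes: format specs pass through unchanged, and slicing has to be spelled with slice() because a bare ':' would be taken as the spec separator:

    from IPython.utils.text import EvalFormatter

    f = EvalFormatter()
    print(f.format('{x*2:>6}', x=21))             # '    42' -- expression plus a format spec
    print(f.format('{s[slice(1,3)]}', s='abcd'))  # 'bc'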
591 @skip_doctest_py3
567 @skip_doctest_py3
592 class FullEvalFormatter(Formatter):
568 class FullEvalFormatter(Formatter):
593 """A String Formatter that allows evaluation of simple expressions.
569 """A String Formatter that allows evaluation of simple expressions.
594
570
595 Any time a format key is not found in the kwargs,
571 Any time a format key is not found in the kwargs,
596 it will be tried as an expression in the kwargs namespace.
572 it will be tried as an expression in the kwargs namespace.
597
573
598 Note that this version allows slicing using [1:2], so you cannot specify
574 Note that this version allows slicing using [1:2], so you cannot specify
599 a format string. Use :class:`EvalFormatter` to permit format strings.
575 a format string. Use :class:`EvalFormatter` to permit format strings.
600
576
601 Examples
577 Examples
602 --------
578 --------
603
579
604 In [1]: f = FullEvalFormatter()
580 In [1]: f = FullEvalFormatter()
605 In [2]: f.format('{n//4}', n=8)
581 In [2]: f.format('{n//4}', n=8)
606 Out[2]: u'2'
582 Out[2]: u'2'
607
583
608 In [3]: f.format('{list(range(5))[2:4]}')
584 In [3]: f.format('{list(range(5))[2:4]}')
609 Out[3]: u'[2, 3]'
585 Out[3]: u'[2, 3]'
610
586
611 In [4]: f.format('{3*2}')
587 In [4]: f.format('{3*2}')
612 Out[4]: u'6'
588 Out[4]: u'6'
613 """
589 """
614 # copied from Formatter._vformat with minor changes to allow eval
590 # copied from Formatter._vformat with minor changes to allow eval
615 # and replace the format_spec code with slicing
591 # and replace the format_spec code with slicing
616 def _vformat(self, format_string, args, kwargs, used_args, recursion_depth):
592 def _vformat(self, format_string, args, kwargs, used_args, recursion_depth):
617 if recursion_depth < 0:
593 if recursion_depth < 0:
618 raise ValueError('Max string recursion exceeded')
594 raise ValueError('Max string recursion exceeded')
619 result = []
595 result = []
620 for literal_text, field_name, format_spec, conversion in \
596 for literal_text, field_name, format_spec, conversion in \
621 self.parse(format_string):
597 self.parse(format_string):
622
598
623 # output the literal text
599 # output the literal text
624 if literal_text:
600 if literal_text:
625 result.append(literal_text)
601 result.append(literal_text)
626
602
627 # if there's a field, output it
603 # if there's a field, output it
628 if field_name is not None:
604 if field_name is not None:
629 # this is some markup, find the object and do
605 # this is some markup, find the object and do
630 # the formatting
606 # the formatting
631
607
632 if format_spec:
608 if format_spec:
633 # override format spec, to allow slicing:
609 # override format spec, to allow slicing:
634 field_name = ':'.join([field_name, format_spec])
610 field_name = ':'.join([field_name, format_spec])
635
611
636 # eval the contents of the field for the object
612 # eval the contents of the field for the object
637 # to be formatted
613 # to be formatted
638 obj = eval(field_name, kwargs)
614 obj = eval(field_name, kwargs)
639
615
640 # do any conversion on the resulting object
616 # do any conversion on the resulting object
641 obj = self.convert_field(obj, conversion)
617 obj = self.convert_field(obj, conversion)
642
618
643 # format the object and append to the result
619 # format the object and append to the result
644 result.append(self.format_field(obj, ''))
620 result.append(self.format_field(obj, ''))
645
621
646 return u''.join(py3compat.cast_unicode(s) for s in result)
622 return u''.join(py3compat.cast_unicode(s) for s in result)
647
623
648 @skip_doctest_py3
624 @skip_doctest_py3
649 class DollarFormatter(FullEvalFormatter):
625 class DollarFormatter(FullEvalFormatter):
650 """Formatter allowing Itpl style $foo replacement, for names and attribute
626 """Formatter allowing Itpl style $foo replacement, for names and attribute
651 access only. Standard {foo} replacement also works, and allows full
627 access only. Standard {foo} replacement also works, and allows full
652 evaluation of its arguments.
628 evaluation of its arguments.
653
629
654 Examples
630 Examples
655 --------
631 --------
656 In [1]: f = DollarFormatter()
632 In [1]: f = DollarFormatter()
657 In [2]: f.format('{n//4}', n=8)
633 In [2]: f.format('{n//4}', n=8)
658 Out[2]: u'2'
634 Out[2]: u'2'
659
635
660 In [3]: f.format('23 * 76 is $result', result=23*76)
636 In [3]: f.format('23 * 76 is $result', result=23*76)
661 Out[3]: u'23 * 76 is 1748'
637 Out[3]: u'23 * 76 is 1748'
662
638
663 In [4]: f.format('$a or {b}', a=1, b=2)
639 In [4]: f.format('$a or {b}', a=1, b=2)
664 Out[4]: u'1 or 2'
640 Out[4]: u'1 or 2'
665 """
641 """
666 _dollar_pattern = re.compile("(.*?)\$(\$?[\w\.]+)")
642 _dollar_pattern = re.compile("(.*?)\$(\$?[\w\.]+)")
667 def parse(self, fmt_string):
643 def parse(self, fmt_string):
668 for literal_txt, field_name, format_spec, conversion \
644 for literal_txt, field_name, format_spec, conversion \
669 in Formatter.parse(self, fmt_string):
645 in Formatter.parse(self, fmt_string):
670
646
671 # Find $foo patterns in the literal text.
647 # Find $foo patterns in the literal text.
672 continue_from = 0
648 continue_from = 0
673 txt = ""
649 txt = ""
674 for m in self._dollar_pattern.finditer(literal_txt):
650 for m in self._dollar_pattern.finditer(literal_txt):
675 new_txt, new_field = m.group(1,2)
651 new_txt, new_field = m.group(1,2)
676 # $$foo --> $foo
652 # $$foo --> $foo
677 if new_field.startswith("$"):
653 if new_field.startswith("$"):
678 txt += new_txt + new_field
654 txt += new_txt + new_field
679 else:
655 else:
680 yield (txt + new_txt, new_field, "", None)
656 yield (txt + new_txt, new_field, "", None)
681 txt = ""
657 txt = ""
682 continue_from = m.end()
658 continue_from = m.end()
683
659
684 # Re-yield the {foo} style pattern
660 # Re-yield the {foo} style pattern
685 yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion)
661 yield (txt + literal_txt[continue_from:], field_name, format_spec, conversion)
686
662
687
663
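A sketch of the parse() logic above: $name and $obj.attr are interpolated, $$ escapes to a literal $, and the rest of the literal text passes through untouched:

    import math
    from IPython.utils.text import DollarFormatter

    f = DollarFormatter()
    print(f.format('$$HOME stays literal, $HOME does not', HOME='/tmp'))
    # -> u'$HOME stays literal, /tmp does not'
    print(f.format('pi is about $math.pi', math=math))
    # -> u'pi is about 3.14159...'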
688 def columnize(items, separator=' ', displaywidth=80):
664 def columnize(items, separator=' ', displaywidth=80):
689 """ Transform a list of strings into a single string with columns.
665 """ Transform a list of strings into a single string with columns.
690
666
691 Parameters
667 Parameters
692 ----------
668 ----------
693 items : sequence of strings
669 items : sequence of strings
694 The strings to process.
670 The strings to process.
695
671
696 separator : str, optional [default is two spaces]
672 separator : str, optional [default is two spaces]
697 The string that separates columns.
673 The string that separates columns.
698
674
699 displaywidth : int, optional [default is 80]
675 displaywidth : int, optional [default is 80]
700 Width of the display in number of characters.
676 Width of the display in number of characters.
701
677
702 Returns
678 Returns
703 -------
679 -------
704 The formatted string.
680 The formatted string.
705 """
681 """
706 # Note: this code is adapted from columnize 0.3.2.
682 # Note: this code is adapted from columnize 0.3.2.
707 # See http://code.google.com/p/pycolumnize/
683 # See http://code.google.com/p/pycolumnize/
708
684
709 # Some degenerate cases.
685 # Some degenerate cases.
710 size = len(items)
686 size = len(items)
711 if size == 0:
687 if size == 0:
712 return '\n'
688 return '\n'
713 elif size == 1:
689 elif size == 1:
714 return '%s\n' % items[0]
690 return '%s\n' % items[0]
715
691
716 # Special case: if any item is longer than the maximum width, there's no
692 # Special case: if any item is longer than the maximum width, there's no
717 # point in triggering the logic below...
693 # point in triggering the logic below...
718 item_len = map(len, items) # save these, we can reuse them below
694 item_len = map(len, items) # save these, we can reuse them below
719 longest = max(item_len)
695 longest = max(item_len)
720 if longest >= displaywidth:
696 if longest >= displaywidth:
721 return '\n'.join(items+[''])
697 return '\n'.join(items+[''])
722
698
723 # Try every row count from 1 upwards
699 # Try every row count from 1 upwards
724 array_index = lambda nrows, row, col: nrows*col + row
700 array_index = lambda nrows, row, col: nrows*col + row
725 for nrows in range(1, size):
701 for nrows in range(1, size):
726 ncols = (size + nrows - 1) // nrows
702 ncols = (size + nrows - 1) // nrows
727 colwidths = []
703 colwidths = []
728 totwidth = -len(separator)
704 totwidth = -len(separator)
729 for col in range(ncols):
705 for col in range(ncols):
730 # Get max column width for this column
706 # Get max column width for this column
731 colwidth = 0
707 colwidth = 0
732 for row in range(nrows):
708 for row in range(nrows):
733 i = array_index(nrows, row, col)
709 i = array_index(nrows, row, col)
734 if i >= size: break
710 if i >= size: break
735 x, len_x = items[i], item_len[i]
711 x, len_x = items[i], item_len[i]
736 colwidth = max(colwidth, len_x)
712 colwidth = max(colwidth, len_x)
737 colwidths.append(colwidth)
713 colwidths.append(colwidth)
738 totwidth += colwidth + len(separator)
714 totwidth += colwidth + len(separator)
739 if totwidth > displaywidth:
715 if totwidth > displaywidth:
740 break
716 break
741 if totwidth <= displaywidth:
717 if totwidth <= displaywidth:
742 break
718 break
743
719
744 # The smallest number of rows computed and the max widths for each
720 # The smallest number of rows computed and the max widths for each
745 # column has been obtained. Now we just have to format each of the rows.
721 # column has been obtained. Now we just have to format each of the rows.
746 string = ''
722 string = ''
747 for row in range(nrows):
723 for row in range(nrows):
748 texts = []
724 texts = []
749 for col in range(ncols):
725 for col in range(ncols):
750 i = row + nrows*col
726 i = row + nrows*col
751 if i >= size:
727 if i >= size:
752 texts.append('')
728 texts.append('')
753 else:
729 else:
754 texts.append(items[i])
730 texts.append(items[i])
755 while texts and not texts[-1]:
731 while texts and not texts[-1]:
756 del texts[-1]
732 del texts[-1]
757 for col in range(len(texts)):
733 for col in range(len(texts)):
758 texts[col] = texts[col].ljust(colwidths[col])
734 texts[col] = texts[col].ljust(colwidths[col])
759 string += '%s\n' % separator.join(texts)
735 string += '%s\n' % separator.join(texts)
760 return string
736 return string
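A sketch of what the row-count search above converges on: the smallest number of rows whose padded columns (plus separators) fit in displaywidth, with items laid out down the columns. Exact spacing depends on the separator default, so the output shown is approximate:

    from IPython.utils.text import columnize

    names = ['alpha', 'beta', 'gamma', 'delta', 'epsilon', 'zeta']

    print(columnize(names, displaywidth=30))
    # roughly:
    # alpha  gamma  epsilon
    # beta   delta  zeta

    print(columnize(names, displaywidth=80))   # fits on a single row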
@@ -1,91 +1,91 b''
1 import sys
1 import sys
2 import time
2 import time
3 from io import StringIO
3 from io import StringIO
4
4
5 from session import extract_header, Message
5 from session import extract_header, Message
6
6
7 from IPython.utils import io, text
7 from IPython.utils import io, text, py3compat
8
8
9 #-----------------------------------------------------------------------------
9 #-----------------------------------------------------------------------------
10 # Globals
10 # Globals
11 #-----------------------------------------------------------------------------
11 #-----------------------------------------------------------------------------
12
12
13 #-----------------------------------------------------------------------------
13 #-----------------------------------------------------------------------------
14 # Stream classes
14 # Stream classes
15 #-----------------------------------------------------------------------------
15 #-----------------------------------------------------------------------------
16
16
17 class OutStream(object):
17 class OutStream(object):
18 """A file like object that publishes the stream to a 0MQ PUB socket."""
18 """A file like object that publishes the stream to a 0MQ PUB socket."""
19
19
20 # The time interval between automatic flushes, in seconds.
20 # The time interval between automatic flushes, in seconds.
21 flush_interval = 0.05
21 flush_interval = 0.05
22 topic=None
22 topic=None
23
23
24 def __init__(self, session, pub_socket, name):
24 def __init__(self, session, pub_socket, name):
25 self.session = session
25 self.session = session
26 self.pub_socket = pub_socket
26 self.pub_socket = pub_socket
27 self.name = name
27 self.name = name
28 self.parent_header = {}
28 self.parent_header = {}
29 self._new_buffer()
29 self._new_buffer()
30
30
31 def set_parent(self, parent):
31 def set_parent(self, parent):
32 self.parent_header = extract_header(parent)
32 self.parent_header = extract_header(parent)
33
33
34 def close(self):
34 def close(self):
35 self.pub_socket = None
35 self.pub_socket = None
36
36
37 def flush(self):
37 def flush(self):
38 #io.rprint('>>>flushing output buffer: %s<<<' % self.name) # dbg
38 #io.rprint('>>>flushing output buffer: %s<<<' % self.name) # dbg
39 if self.pub_socket is None:
39 if self.pub_socket is None:
40 raise ValueError(u'I/O operation on closed file')
40 raise ValueError(u'I/O operation on closed file')
41 else:
41 else:
42 data = self._buffer.getvalue()
42 data = self._buffer.getvalue()
43 if data:
43 if data:
44 content = {u'name':self.name, u'data':data}
44 content = {u'name':self.name, u'data':data}
45 msg = self.session.send(self.pub_socket, u'stream', content=content,
45 msg = self.session.send(self.pub_socket, u'stream', content=content,
46 parent=self.parent_header, ident=self.topic)
46 parent=self.parent_header, ident=self.topic)
47
47
48 if hasattr(self.pub_socket, 'flush'):
48 if hasattr(self.pub_socket, 'flush'):
49 # socket itself has flush (presumably ZMQStream)
49 # socket itself has flush (presumably ZMQStream)
50 self.pub_socket.flush()
50 self.pub_socket.flush()
51 self._buffer.close()
51 self._buffer.close()
52 self._new_buffer()
52 self._new_buffer()
53
53
54 def isatty(self):
54 def isatty(self):
55 return False
55 return False
56
56
57 def next(self):
57 def next(self):
58 raise IOError('Read not supported on a write only stream.')
58 raise IOError('Read not supported on a write only stream.')
59
59
60 def read(self, size=-1):
60 def read(self, size=-1):
61 raise IOError('Read not supported on a write only stream.')
61 raise IOError('Read not supported on a write only stream.')
62
62
63 def readline(self, size=-1):
63 def readline(self, size=-1):
64 raise IOError('Read not supported on a write only stream.')
64 raise IOError('Read not supported on a write only stream.')
65
65
66 def write(self, string):
66 def write(self, string):
67 if self.pub_socket is None:
67 if self.pub_socket is None:
68 raise ValueError('I/O operation on closed file')
68 raise ValueError('I/O operation on closed file')
69 else:
69 else:
70 # Make sure that we're handling unicode
70 # Make sure that we're handling unicode
71 if not isinstance(string, unicode):
71 if not isinstance(string, unicode):
72 enc = text.getdefaultencoding()
72 enc = py3compat.getdefaultencoding()
73 string = string.decode(enc, 'replace')
73 string = string.decode(enc, 'replace')
74
74
75 self._buffer.write(string)
75 self._buffer.write(string)
76 current_time = time.time()
76 current_time = time.time()
77 if self._start <= 0:
77 if self._start <= 0:
78 self._start = current_time
78 self._start = current_time
79 elif current_time - self._start > self.flush_interval:
79 elif current_time - self._start > self.flush_interval:
80 self.flush()
80 self.flush()
81
81
82 def writelines(self, sequence):
82 def writelines(self, sequence):
83 if self.pub_socket is None:
83 if self.pub_socket is None:
84 raise ValueError('I/O operation on closed file')
84 raise ValueError('I/O operation on closed file')
85 else:
85 else:
86 for string in sequence:
86 for string in sequence:
87 self.write(string)
87 self.write(string)
88
88
89 def _new_buffer(self):
89 def _new_buffer(self):
90 self._buffer = StringIO()
90 self._buffer = StringIO()
91 self._start = -1
91 self._start = -1
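For reference, a stand-alone stand-in (not the real class, no 0MQ involved) that mimics the buffering policy of OutStream.write()/flush(): text accumulates in a StringIO, and a flush is triggered by a write once flush_interval seconds have passed since the first unflushed write.

    import time
    from io import StringIO

    class TimedBuffer(object):
        flush_interval = 0.05          # seconds, mirrors OutStream.flush_interval

        def __init__(self, sink):
            self.sink = sink           # stands in for session.send(..., u'stream', ...)
            self._new_buffer()

        def _new_buffer(self):
            self._buffer = StringIO()
            self._start = -1           # -1 means "no unflushed data yet"

        def write(self, string):
            self._buffer.write(string)
            now = time.time()
            if self._start <= 0:
                self._start = now      # first write since the last flush
            elif now - self._start > self.flush_interval:
                self.flush()           # includes the string just written

        def flush(self):
            data = self._buffer.getvalue()
            if data:
                self.sink(data)        # OutStream publishes a 'stream' message here
            self._buffer.close()
            self._new_buffer()

    def sink(data):
        print('flushed: %r' % data)

    buf = TimedBuffer(sink)
    buf.write(u'hello ')
    time.sleep(0.06)
    buf.write(u'world\n')              # interval exceeded -> flush happens inside write()
    buf.flush()                        # flush whatever is left (nothing here)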